<|start_filename|>public/stylesheets/styles.css<|end_filename|>
html {
font-size: 100%
}
body {
background: #fff;
font-family: roboto, sans-serif;
font-size: 14px;
font-weight: 300;
color: #313131;
line-height: 28px;
text-align: center;
overflow-x: hidden!important;
margin: auto!important
}
.clear {
clear: both
}
pre {
white-space: normal
}
@-webkit-viewport {
width: device-width;
}
@-moz-viewport {
width: device-width;
}
@-ms-viewport {
width: device-width;
}
@-o-viewport {
width: device-width;
}
@viewport {
width: device-width;
}
a {
-webkit-transition: all ease .25s;
transition: all ease .25s
}
a:hover {
text-decoration: none
}
.btn:focus,
.btn:active {
outline: inherit
}
*,
*:before,
*:after {
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box
}
ul,
ol {
padding-left: 0!important
}
li {
list-style: none
}
.preloader {
position: fixed;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: #fefefe;
z-index: 99999;
height: 100%;
width: 100%;
overflow: hidden!important
}
.status {
width: 40px;
height: 40px;
position: absolute;
left: 50%;
top: 50%;
background-image: url(../img/loading.gif);
background-repeat: no-repeat;
background-position: center;
-webkit-background-size: cover;
background-size: cover;
margin: -20px 0 0 -20px
}
.align-left {
text-align: left
}
.row {
clear: both;
position: relative
}
.sponsor {
margin: 0 auto
}
.form-control {
height: auto
}
@font-face {
font-family: lane;
src: url(../fonts/lanenar_Lane.eot);
src: url(../fonts/lanenar_Laned41d.eot?#iefix) format('embedded-opentype'), url(../fonts/lanenar_Lane.woff) format('woff'), url(../fonts/lanenar_Lane.ttf) format('truetype'), url(../fonts/lanenar_Lane.svg#Lane) format('svg');
font-weight: 400;
font-style: normal
}
@media screen and (-webkit-min-device-pixel-ratio:0) {
@font-face {
font-family: lane;
src: url(../fonts/lanenar_Lane.svg#Lane) format('svg')
}
}
h1,
h2 {
font-family: lane, sans-serif;
font-weight: 400
}
h1 {
font-size: 3.75rem;
line-height: 4.688rem
}
.download h1 {
font-size: 2em;
line-height: 2rem
}
h2 {
font-size: 3.125rem;
line-height: 4.063rem
}
h3 {
font-size: 24px;
line-height: 38px;
font-weight: 300
}
h4 {
color: #005093;
font-size: 20px;
line-height: 30px;
font-weight: 300
}
h5 {
font-size: 18px;
font-weight: 300
}
.link {
cursor: pointer
}
.standard-button,
.standard-button2 {
font-size: 18px;
font-weight: 400!important;
border-radius: 4px!important;
text-shadow: none!important;
color: #fff;
min-width: 150px;
border: 0;
padding: 16px 25px;
margin: 5px;
-webkit-transition: all ease .25s;
transition: all ease .25s
}
.standard-button:hover,
.standard-button2:hover {
border: 0
}
.standard-button i,
.standard-button2 i {
vertical-align: inherit;
margin-right: 8px;
font-size: 20px
}
.slow-spin {
-webkit-animation: fa-spin 6s infinite linear;
animation: fa-spin 6s infinite linear
}
.fast-spin {
-webkit-animation: fa-spin 1s infinite linear;
animation: fa-spin 1s infinite linear
}
.white-text {
color: #fff
}
.dark-text {
color: #272727
}
.grey-bg {
background: #f7f8fa!important
}
.section-header {
padding-bottom: 78px
}
.section-header h2 {
margin-bottom: 20px
}
.section-header .section-description {
display: inline-block;
position: relative;
text-align: center;
padding-top: 10px;
padding-bottom: 10px
}
.wrapper {
padding-bottom: 50px;
padding-top: 50px
}
@media(max-width:420px) {
.nav-open .wrapper {
padding-top: 220px
}
}
@media(max-width:480px) {
.nav-open .navbar-collapse {
background-color: rgba(0, 0, 0, .9)
}
}
.spacer {
margin-bottom: 50px;
height: 10px
}
.colored-line {
margin: auto;
z-index: 1;
width: 165px;
height: 1px
}
.white-line {
margin: auto;
z-index: 1;
width: 165px;
height: 1px;
background: #fff
}
.colored-line-left {
z-index: 1;
display: block;
width: 165px;
height: 1px;
margin-bottom: 40px!important
}
.navbar {
margin-bottom: 0;
width: 100%;
position: absolute;
z-index: 999
}
.navbar a {
color: #fff;
cursor: pointer
}
.navbar-nav>li>a {
padding: 10px 14px
}
.user .navbar a,
.user .nav-mobile-toggle,
.pages .navbar a,
.pages .nav-mobile-toggle,
.account .navbar a,
.account .nav-mobile-toggle,
.login .navbar a,
.login .nav-mobile-toggle {
color: #333
}
.nav>li>a:hover,
.nav>li>a:focus {
background-color: transparent
}
.navbar a:hover {
background-color: transparent
}
.navbar-nav {
float: right
}
.nav>li {
display: inline-block
}
.nav-mobile-toggle {
display: none
}
@media(max-width:480px) {
.nav-open .nav-mobile-toggle {
color: #fff
}
.container>.navbar-header,
.container-fluid>.navbar-header,
.container>.navbar-collapse,
.container-fluid>.navbar-collapse {
margin-right: -16px;
margin-left: -16px
}
.nav-mobile-toggle {
display: block;
color: #fff;
float: right;
padding: 12px;
cursor: pointer
}
.navbar-nav {
display: none;
width: 100%;
margin: 0 0 20px
}
.navbar-nav.open {
display: block
}
.nav>li {
display: block;
text-align: center
}
.nav>li a {
color: #fff!important
}
.nav>li a label {
left: auto!important;
padding-left: 5px
}
}
.sticky-navigation {
top: -60px;
min-height: 60px;
background: #fff;
border: 0;
padding: 0!important;
margin-bottom: 0!important;
-webkit-backface-visibility: hidden;
-webkit-box-shadow: 0 2px 8px 0 rgba(50, 50, 50, .08);
box-shadow: 0 2px 8px 0 rgba(50, 50, 50, .08)
}
.sticky-navigation .navbar-header img {
max-height: 30px;
-webkit-backface-visibility: hidden
}
.sticky-navigation .main-navigation .current a {
color: #000!important;
position: relative;
outline: 0
}
.sticky-navigation .main-navigation li a {
line-height: 30px;
font-weight: 400;
font-size: 13px;
text-transform: uppercase
}
.sticky-navigation .main-navigation li a:hover {
color: #000
}
.wrapper-404 {
margin: 20px 0
}
.header.solid-color {
padding: 0;
background-image: url(https://source.unsplash.com/random/800x600);
background-size: cover;
background-repeat: no-repeat;
background-color: #fff
}
#Branded.header.solid-color {
background-image: none;
min-height: 1200px;
padding-top: 100px
}
.home .color-overlay {
padding-top: 40px
}
.color-overlay {
background: rgba(0, 0, 0, .55);
padding: 130px 0
}
.subscription-form-container * {
color: #fff
}
.header.solid-color a,
.header.solid-color p {
color: #fff
}
.header .full-screen {
width: 100%;
height: 100px
}
.navbar-inverse .navbar-toggle {
border-color: #313131;
background: #313131
}
.only-logo {
background: 0 0;
padding: 15px 0 30px
}
.only-logo .navbar-header {
float: none
}
.header .intro-section {
margin: auto;
margin-bottom: 100px;
padding-top: 0%
}
.download .header .intro-section {
margin-bottom: 20px
}
.header#download #file {
margin-top: 50px
}
.header .intro-section .intro {
color: #fff;
margin-bottom: 50px
}
.header #main-title {
margin-bottom: 20px
}
.download .intro-section .intro {
margin-bottom: 5px
}
.download #file {
margin-top: 25px
}
.download #file .details {
text-overflow: ellipsis;
overflow: hidden
}
.download .hero-image {
margin-bottom: 50px
}
.download .hero-image img {
width: 100%;
max-width: 700px;
border: 10px solid #fff
}
.header .intro-section h5 {
color: #fff
}
.header .intro-section .buttons {
margin-top: 35px
}
.features {
padding-top: 80px;
padding-bottom: 30px;
background: #fff
}
.features .features-left {
text-align: right;
margin-top: 26px;
margin-bottom: 0
}
.features .features-left .icon-container {
float: right;
margin-left: 20px
}
.features .features-right {
text-align: left;
margin-top: 26px;
margin-bottom: 0
}
.features .features-right .icon-container {
float: left;
margin-right: 20px
}
.features .feature {
margin-bottom: 40px
}
.features .feature .icon-container {
display: block;
min-height: 120px;
margin-top: 5px
}
.features .feature .icon-container .icon {
width: 55px;
height: 55px;
border-radius: 50%;
line-height: 55px;
font-size: 30px;
text-align: center;
-webkit-transition: all ease-in .25s;
transition: all ease-in .25s;
-webkit-background-clip: padding-box;
-moz-background-clip: padding;
background-clip: padding-box
}
.features .feature .icon-container img {
width: 100%
}
.features .feature .icon-container .icon .fa {
line-height: 55px;
font-size: 50px
}
.features .phone-image {
margin: auto;
margin-bottom: 80px
}
.features .phone-image img {
width: 100%;
padding-top: 120px
}
.app-brief {
padding-top: 110px;
padding-bottom: 60px
}
.app-brief h2 {
margin-bottom: 20px
}
.app-brief .phone-image {
max-width: 350px;
margin: auto;
margin-bottom: 50px;
margin-top: 50px
}
.app-brief .phone-image img {
width: 100%
}
.app-brief .left-align {
text-align: left;
margin-bottom: 50px
}
.app-brief .left-align p {
display: block;
float: none;
margin-bottom: 20px
}
.app-brief .left-align .feature-list {
margin-top: 40px
}
.app-brief .left-align .feature-list li {
margin-bottom: 26px
}
.app-brief .left-align .feature-list li i {
margin-right: 10px;
vertical-align: inherit
}
.testimonials {
color: #fff
}
.testimonials .solid-color {
padding-top: 110px;
padding-bottom: 110px
}
.testimonials .feedback {
max-width: 750px;
margin: auto
}
.testimonials .feedback .image {
font-size: 24px;
border: 2px solid transparent;
-webkit-box-shadow: 0 0 0 2px #fff;
box-shadow: 0 0 0 2px #fff;
border-radius: 50%;
width: 80px;
height: 80px;
line-height: 80px;
margin: auto;
margin-top: 5px;
overflow: hidden
}
.testimonials .feedback .image img {
width: 100%;
vertical-align: top
}
.testimonials .feedback .message {
font-size: 18px;
font-style: italic;
margin-top: 40px;
margin-bottom: 30px
}
.testimonials .feedback .name {
margin-top: 15px;
font-weight: 400
}
.testimonials .feedback .company-info {
font-size: 12px
}
.testimonials .owl-theme .owl-controls .owl-page span {
background: #fff;
border-radius: 50%
}
.testimonials .customNavigation a {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
-webkit-tap-highlight-color: transparent
}
.services {
padding-top: 0;
padding-bottom: 60px;
background: #fdfdfd
}
.services .single-service {
padding-bottom: 60px
}
.services .single-service .service-icon {
font-size: 80px
}
.services .single-service h3 {
margin-top: 20px
}
.screenshots {
padding-top: 80px;
padding-bottom: 120px
}
.screenshots .shot {
background: #fff;
padding: 10px;
margin: 13px;
border-radius: 4px;
-webkit-box-shadow: none;
box-shadow: none;
display: block
}
.screenshots .shot img {
border-radius: 4px;
width: 100%
}
.modal-backdrop.in {
opacity: .7
}
@media(min-width:764px) {
.join-modal .modal-dialog {
max-width: 1140px;
width: 90%
}
}
@media(max-width:764px) {
.modal-dialog {
margin: 30px 5px
}
}
.join-modal .modal-body {
background-color: transparent;
padding: 10px 0
}
.join-modal .modal-content {
background-color: transparent;
-webkit-box-shadow: none;
box-shadow: none;
border: 0
}
.join-modal .most-popular,
.packages .most-popular {
position: absolute;
top: -9px;
right: 9px;
width: 120px;
z-index: 99999999999
}
.packages .most-popular {
top: -9px;
right: -6px
}
.join-modal button.close {
font-weight: 700;
font-size: 40px;
color: #fff;
opacity: 1;
position: absolute;
right: 0;
top: -10px;
z-index: 99999
}
.join-modal h4,
.join-modal p {
color: #fff;
margin-bottom: 20px;
font-weight: 700
}
.packages {
padding-top: 80px;
padding-bottom: 60px;
background: #fff
}
.single-package {
background: #fff;
overflow: auto;
border: 0;
border-radius: 3px;
margin-bottom: 60px
}
.single-package .price {
background: #fff;
min-height: 83px
}
/* .color-bg background color is supplied by the theme stylesheet (see .color-bg in purple.css) */
.single-package .price h2 {
color: #272727
}
.single-package .price .color-bg h2 {
color: #fff!important
}
.price .sign {
vertical-align: super;
font-size: 20px
}
.price .month {
font-size: 16px;
font-family: roboto, sans-serif;
font-weight: 300
}
.package-feature {
margin-top: 25px;
margin-bottom: 25px
}
.single-package .package-feature>li {
margin-top: 15px;
margin-bottom: 15px
}
li label {
font-size: 8px;
position: absolute;
right: -20px;
top: -10px;
font-weight: 700
}
.single-package .package-feature>li>span {
margin-right: 7px;
position: relative
}
.single-package .package-feature>li>span.size {
font-size: 25px;
font-weight: 500
}
.pricing-5 div[class*=col-] {
transition: .2s linear;
-webkit-transition: .2s linear;
-moz-transition: .2s linear;
padding: 0
}
.pricing-5 div[class*=col-].pricing--emphasise {
box-shadow: 0 0 60px 0 rgba(0, 0, 0, .07);
z-index: 2
}
.pricing-5 div[class*=col-]:not(:first-child) ul li:last-child {
height: auto!important
}
.pricing-5 .pricing__title {
min-height: 182px;
padding: 1.625em 0;
position: relative
}
.pricing-5 .pricing__title h6,
.pricing-5 .pricing__title h5 {
margin: 0
}
.pricing-5 .pricing__title h6 {
opacity: .5;
font-size: 18px
}
.pricing-5 .pricing__price {
margin-top: .8125em
}
.pricing-5 ul {
margin: 0
}
.pricing-5 ul li {
padding: 15px 0
}
.pricing-5 ul li span:not(.btn__text) {
position: relative
}
.pricing-5 ul li span:not(:first-child) {
display: inline-block;
position: relative
}
.pricing-5 .tooltipped {
padding-right: 5px
}
.pricing-5 ul li span.text {
bottom: 0
}
.pricing-5 ul li:nth-child(odd) {
background: #f6f6f6
}
@media all and (max-width:990px) {
.pricing-5 .pricing__title {
border: 0
}
.pricing-5 .pricing__title {
min-height: auto
}
}
@media all and (max-width:767px) {
.pricing-5 div[class*=col-] {
margin-bottom: 1.625em
}
}
.checkmark,
.crossmark {
width: 1.625em;
height: 1.625em;
line-height: 1.625em;
background: #64b671;
border-radius: 50%;
display: inline-block;
position: relative;
text-align: center
}
.checkmark:before,
.crossmark:before {
content: '\2713';
color: #fff;
width: 100%;
opacity: 1;
font-size: 13px
}
.crossmark {
background: #e34747
}
.crossmark:before {
content: '\2715'
}
.home input#password {
text-align: center;
padding: 30px;
font-size: 18px;
width: 80%;
margin: 0 auto
}
.solid-color p {
color: #fff
}
.download .solid-color {
padding: 0
}
.download .download-container {
padding-bottom: 80px
}
.download .subscription-form-container {
padding-bottom: 20px
}
.download .subscription-form .input-box {
height: 57px;
padding-left: 20px;
width: 320px;
-webkit-box-shadow: none;
box-shadow: none;
background: #fff;
border: 2px solid #fff
}
.download .subscription-form .input-box:focus,
.download .subscription-form .input-box:active {
color: #000;
font-weight: 400
}
.download .subscription-form .subscription-success,
.download .subscription-form .subscription-error {
display: none
}
.download h2 {
margin-bottom: 30px;
color: #fff
}
footer .contact-box {
margin-bottom: 20px
}
footer .contact-box .contact-button {
height: 80px;
width: 80px;
border-radius: 50%;
margin: auto;
position: relative;
font-size: 30px;
line-height: 60px;
vertical-align: middle;
margin-top: -40px;
background: #fff;
-webkit-box-shadow: 0 0 0 3px #fff;
box-shadow: 0 0 0 3px #fff;
-webkit-backface-visibility: hidden;
-webkit-background-clip: padding-box;
-moz-background-clip: padding;
background-clip: padding-box
}
footer .contact-box .contact-button:hover {
color: #fff
}
footer .contact-box .contact-button a {
display: list-item;
list-style: none
}
footer .contact-box .contact-button:hover img {
-webkit-filter: invert(100%)
}
footer .contact-box .expanded-contact-form {
margin-top: 40px;
display: none
}
footer .contact-box .expanded-contact-form .success,
footer .contact-box .expanded-contact-form .error {
display: none
}
footer .contact-box .expanded-contact-form .input-box {
margin-bottom: 10px;
margin-top: 10px;
height: 50px;
-webkit-box-shadow: none;
box-shadow: none
}
footer .contact-box .expanded-contact-form .textarea-box {
margin-top: 20px;
margin-bottom: 20px;
-webkit-box-shadow: none;
box-shadow: none
}
footer .contact-box .expanded-contact-form .textarea-box:active {
color: #000
}
footer .social-icons {
margin-top: 30px;
border-bottom: 1px solid #e5e5e5;
padding-bottom: 50px
}
footer .social-icons li {
display: inline-block;
margin: 5px
}
footer .social-icons li a {
font-size: 30px;
color: #c2c2c2
}
footer .copyright {
color: gray;
padding-top: 10px;
margin-bottom: 50px
}
footer {
clear: both
}
.dropzone {
border: 0;
min-height: 0;
padding: 35px
}
#uf-uploader form {
clear: both;
color: #fff;
border: 2px dashed #fff
}
#uf-uploader form.dz-started {
border: 0
}
#file {
color: #fff
}
.details p {
margin: 5px 0;
font-size: 15px
}
#file .link {
font-size: 1em;
font-weight: 700;
margin: 0 0 2em
}
.hide {
display: none
}
.promo {
margin-top: 40px;
color: #fff
}
.upload-link-container {
margin: 0 auto;
margin-bottom: 15px;
max-width: 600px;
position: relative
}
.upload-link {
width: 100%;
text-align: center;
color: #333;
clear: both;
vertical-align: middle;
padding: 12px;
font-size: 30px;
line-height: 36px;
border: 0;
border-radius: 0;
-webkit-appearance: none
}
@media(max-width:480px) {
.upload-link {
font-size: 22px
}
#_hj-f5b2a1eb-9b07_feedback[data-minimized-position=middle_left] #_hj-f5b2a1eb-9b07_feedback_minimized {
bottom: 50px!important
}
}
.upload-link-container .copy {
position: absolute;
right: 0;
top: 0
}
.upload-link-container .copy,
.modal .copy {
display: table-cell;
padding: 18px 15px 19px;
line-height: 30px;
vertical-align: top;
margin-left: -6px;
border: 0;
background-color: #eee;
cursor: pointer
}
.upload-link-container .copy i {
font-size: 23px;
color: #000
}
.upload-link-container .copy img,
.modal .copy img {
width: 20px
}
#share {
clear: both
}
.video-js {
width: 100%;
max-width: 100%;
max-height: 500px
}
#edit {
margin-bottom: 20px;
font-size: 14px;
padding: 20px 0;
color: #fff;
display: none
}
#edit a {
color: #fff
}
#edit .expiration input {
margin-left: 10px;
margin-right: 5px
}
.streaming span {
margin-right: 10px
}
.expiration {
margin-left: 0;
width: 100%
}
#expiredate {
text-align: center;
margin: 0 auto
}
.radio-option {
display: inline-block;
width: auto
}
.radio-option input {
margin-right: 5px!important;
margin-left: 15px!important
}
.onoffswitch {
float: left;
margin-top: 5px;
position: relative;
width: 50px;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none
}
.onoffswitch-checkbox {
display: none
}
.onoffswitch-label {
display: block;
overflow: hidden;
cursor: pointer;
height: 20px;
padding: 0;
line-height: 20px;
border: 2px solid #ccc;
border-radius: 20px;
background-color: #fff;
transition: background-color .3s ease-in
}
.onoffswitch-label:before {
content: "";
display: block;
width: 20px;
height: 20px;
margin: 0;
background: #fff;
position: absolute;
top: 0;
bottom: 0;
right: 30px;
border: 2px solid #ccc;
border-radius: 20px;
transition: all .3s ease-in 0s
}
.onoffswitch-checkbox:checked+.onoffswitch-label {
background-color: #8b31e5
}
.onoffswitch-checkbox:checked+.onoffswitch-label,
.onoffswitch-checkbox:checked+.onoffswitch-label:before {
border-color: #8b31e5
}
.onoffswitch-checkbox:checked+.onoffswitch-label:before {
right: 0
}
._hj-f5b2a1eb-9b07_widget_footer ._hj-f5b2a1eb-9b07_pull_left {
display: none
}
.file-status {
border-radius: 50%;
display: inline-block;
margin: 0 auto;
width: 10px;
height: 10px
}
.file-status.ok {
background: #69c773
}
.file-status.reported {
background: #f1f152
}
.file-status.banned {
background: #d74d2f
}
.account-uploader {
margin-bottom: -50px
}
.uploads-table {
margin-bottom: 40px;
font-weight: 300
}
.uploads-table td {
font-weight: 300
}
.bootstrap-table .fixed-table-body {
height: auto
}
.bootstrap-table .btn-sm {
padding: 10px;
min-width: 35px;
font-size: 14px
}
.bootstrap-table .actions,
.bootstrap-table .downloads {
text-align: center
}
.g-recaptcha {
display: inline-block
}
.register form,
.login form {
max-width: 500px;
margin: 0 auto;
text-align: center
}
.register form .form-control,
.login form .form-control {
text-align: center;
font-size: 24px;
height: auto
}
.register form label,
.login form label {
display: none
}
.page-header {
margin: 20px 0
}
.upload-area {
padding: 100px 0
}
.progress-bar {
max-width: 100%;
background-color: #005093
}
.refer-container {
margin-bottom: 20px
}
#referModal .upload-link {
font-size: 20px;
text-align: center;
background-color: #f5f8fb
}
.referral-links a,
#share-file a {
padding: 5px 10px;
margin: 5px 0;
display: inline-block;
font-size: 13px;
min-width: 0;
white-space: nowrap
}
@media(max-width:480px) {
.referral-links a,
#share-file a {
width: 48%
}
.referral-links a.addthis_button_linkedin,
#share-file a.addthis_button_linkedin {
display: none
}
}
.referral-links a i,
#share-file a i {
font-size: 14px
}
body .referral-links a span,
body #share-file a span {
background-color: transparent!important;
line-height: 28px!important;
margin-right: 8px!important
}
body .referral-links a span svg,
body #share-file a span svg {
width: 20px!important;
height: 20px!important
}
.state-overview>div,
.state-overview section {
background-color: #f5f8fb;
margin-bottom: 0
}
body .fixed-table-body,
body .fixed-table-container,
body,
.fixed-table-body tr th,
.fixed-table-body tr td {
border: 0;
vertical-align: middle!important
}
.fixed-table-container tbody td .th-inner,
.fixed-table-container thead th .th-inner {
font-weight: 300;
font-size: 14px
}
.streaming {
display: none
}
.modal-footer {
margin-top: 0
}
.row.plan {
vertical-align: middle;
margin: 0;
padding: 20px
}
.row.plan div {
padding: 0;
margin-top: 0!important
}
.row.plan h3 {
margin: 0
}
.plan-price * {
font-size: 18px
}
.row.plan .plan-name h3 {
margin-top: 15%
}
.row.plan .free-plan * {
margin-top: 10%
}
#upgradeModal .row.plan .plan-features {
margin-top: 0
}
.row.plan .col-lg-3 {
margin-top: 20px
}
.row.plan:nth-child(odd) {
background-color: #f9f9f9
}
.upload-button {
margin-top: 20px
}
#share-file {
margin-top: 20px;
margin-bottom: 20px
}
#share-file h3 {
clear: both
}
#expiredate .pro-link {
text-decoration: underline
}
.alert {
margin: 0
}
.btn.donate {
margin-bottom: 10px
}
input[type=radio],
input[type=checkbox] {
margin-right: 8px
}
#settingsModal .modal-header {
padding: 0
}
#settingsModal .left {
text-align: right
}
#settingsModal .modal-header .close {
position: absolute;
top: 10px;
right: 10px
}
#settingsModal .right {
text-align: left
}
#settingsModal .row {
padding: 20px
}
#settingsModal input,
#settingsModal textarea,
#settingsModal select,
#settingsModal option,
#settingsModal #upload-logo {
width: 75%;
padding: 5px;
border: 1px solid #eee
}
#settingsModal #upload-logo {
color: #333;
min-height: 100px
}
#settingsModal #upload-logo .dz-preview {
margin: 0
}
#settingsModal .dz-preview .dz-details {
color: #333;
padding: 0
}
#settingsModal .dz-preview .dz-details .dz-size {
margin-bottom: 10px
}
#settingsModal .dz-preview .dz-image {
height: auto;
border-radius: 0
}
#settingsModal .dz-preview .dz-progress .dz-upload {
background: green
}
#settingsModal .dz-preview .dz-progress {
top: 80%
}
#settingsModal .delete-logo {
position: absolute;
top: -10px;
left: 5px;
border: 1px solid #ddd;
background-color: #fff;
border-radius: 50%;
width: 20px;
height: 20px;
text-align: center;
line-height: 17px;
font-size: 15px;
font-weight: 700;
cursor: pointer
}
#settingsModal .menu-item {
display: inline-block;
text-align: center;
padding: 10px;
min-width: 100px;
cursor: pointer
}
#settingsModal .menu-item i {
font-size: 20px
}
#settingsModal .menu-item p {
margin-bottom: 0
}
#settingsModal .menu-item.active,
#settingsModal .menu-item:hover {
background-color: #f9f9f9
}
#settingsModal .section {
display: none
}
#settingsModal .section.active {
display: block
}
.recent-uploads {
margin-top: 30px
}
th {
text-align: center
}
.stats {
color: #fff
}
.interval-selector {
width: 175px;
line-height: 40px;
margin: 30px auto 0;
position: relative
}
.interval-selector .promo {
position: absolute;
right: -70px;
font-size: 10px;
top: -10px;
color: #333;
margin: 0
}
.modal .interval-selector {
position: absolute;
left: 20px;
top: 0;
color: #fff;
margin: 0
}
@media(max-width:1000px) {
.modal .interval-selector {
top: -20px
}
}
@media(max-width:750px) {
.interval-selector {
top: auto;
position: relative;
clear: both
}
.join-modal h2,
.join-modal p {
clear: both
}
}
.mute {
float: left
}
.switch {
position: relative;
display: inline-block;
width: 60px;
height: 34px;
margin: 0 10px;
float: left
}
.switch input {
display: none
}
.slider {
position: absolute;
cursor: pointer;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: #ccc;
-webkit-transition: .4s;
transition: .4s
}
.slider:before {
position: absolute;
content: "";
height: 26px;
width: 26px;
left: 4px;
bottom: 4px;
background-color: #fff;
-webkit-transition: .4s;
transition: .4s
}
input:checked+.slider {
background-color: #005093
}
input:focus+.slider {
box-shadow: 0 0 1px #005093
}
input:checked+.slider:before {
-webkit-transform: translateX(26px);
-ms-transform: translateX(26px);
transform: translateX(26px)
}
.slider.round {
border-radius: 34px
}
.slider.round:before {
border-radius: 50%
}
.tweet-example {
margin-bottom: 20px
}
.tweet-example .fa-twitter {
font-size: 40px;
position: absolute;
left: 90px
}
.bubble {
font-size: 13px;
position: relative;
padding: 15px;
background: #005093;
color: #fff;
-webkit-border-radius: 10px;
-moz-border-radius: 10px;
border-radius: 10px;
margin-left: 10px
}
.bubble:after {
content: '';
position: absolute;
border-style: solid;
border-width: 9px 11px 9px 0;
border-color: transparent #005093;
display: block;
width: 0;
z-index: 1;
left: -11px;
top: 14px
}
@media(max-width:320px) {
.bubble {
font-size: 10px
}
}
.folder .header.solid-color a {
color: #333
}
.folder h1 {
color: #fff
}
#files {
padding-bottom: 150px
}
.files {
width: 100%;
height: 100%;
padding: 10px
}
.file-wrapper {
padding: 10px
}
.file {
padding: 10px;
position: relative;
background-color: #fff;
text-align: center
}
.file.active {
background-color: #eee
}
.file-info {
font-size: 12px;
text-align: center
}
.grid .file-info span {
margin: 0 10px
}
.grid .file-info span .fa {
margin-right: 5px
}
.table .file-info {
float: right;
font-size: 14px;
width: 30%;
min-width: 200px
}
@media(max-width:320px) {
.table .file-info {
display: none
}
}
.table .file-info span {
width: 30%;
display: inline-block
}
.file img {
width: 100%;
padding: 0 35px
}
.file .name {
white-space: nowrap;
overflow: hidden;
height: 30px;
text-overflow: ellipsis;
margin-bottom: 0;
display: block
}
.table .file .name {
overflow: visible;
height: auto;
margin: 0;
padding: 0;
display: inline-block
}
.file:hover {
background-color: #f9f9f9
}
.file:hover .file-actions {
opacity: 1
}
.file .file-actions {
opacity: 1;
position: absolute;
right: 20px;
top: 10px;
cursor: pointer;
background-color: #fff;
padding: 2px 10px;
-moz-border-radius: 4px;
-webkit-border-radius: 4px;
border-radius: 4px;
-webkit-box-shadow: 0 1px 2px 0 rgba(0, 0, 0, .15);
-moz-box-shadow: 0 1px 2px 0 rgba(0, 0, 0, .15);
box-shadow: 0 1px 2px 0 rgba(0, 0, 0, .15)
}
.table .file .file-actions {
display: block;
top: 13px;
-webkit-box-shadow: none;
-moz-box-shadow: none;
box-shadow: none;
background-color: inherit
}
.file .file-actions .dropdown-menu {
top: 20px
}
.file .file-actions .dropdown-menu .fa {
margin-right: 10px;
width: 15px;
text-align: center
}
.file .file-icon {
padding: 25px
}
.table .file .file-icon {
padding: 5px 10px 0;
font-size: 20px;
display: inline-flex
}
.file.image .file-icon:before {
content: "\f1c5";
color: #2aa08a
}
.file.audio .file-icon:before {
content: "\f1c7";
color: #7d6599
}
.file.video .file-icon:before {
content: "\f1c8";
color: #ff2d3a
}
.file.word .file-icon:before {
content: "\f1c2";
color: #31a9dd
}
.file.excel .file-icon:before {
content: "\f1c3";
color: #48b258
}
.file.pdf .file-icon:before {
content: "\f1c1";
color: #b51c30
}
.file.ppt .file-icon:before {
content: "\f1c4";
color: #dd4921
}
.file.doc .file-icon:before {
content: "\f0f6";
color: #ff5722
}
.file.archive .file-icon:before {
content: "\f1c6";
color: #393768
}
.file-wrapper .file-status-icons {
position: absolute;
left: 10px;
bottom: 10px
}
.file-wrapper .file-status-icons i {
padding: 5px
}
.file-wrapper .password-icon,
.file-wrapper .sell-icon {
display: none
}
.protected .password-icon {
display: block;
float: left
}
.sell .sell-icon {
display: block;
float: left
}
.folder .hero-image {
height: 120px;
background-size: cover
}
.ad-blocked {
border: 1px dashed #fff;
padding: 40px;
display: none
}
.ad-blocked h3 {
color: #fff;
padding: 0;
margin: 0
}
.ad-blocked * img {
margin: 0 10px
}
.at4-welcome-outer.addthis_bar.addthis_bar_fixed.addthis_bar_top.at-cv-goal-prompt {
background-color: #005093!important
}
a.addthis_bar_button.addthis_bar_action_elem.addthis_bar_button_link.at-cv-button.at-topbottom-prompt {
background-color: #2879c1!important
}
<|start_filename|>config.js<|end_filename|>
module.exports = {
dbURL: process.env.DATABASEURL || "mongodb://localhost/dropit",
port: process.env.PORT || 7000
}
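// Example (shell, hypothetical values): override the defaults via environment variables:
//   DATABASEURL="mongodb://db.example.com/dropit" PORT=8080 node app.js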
<|start_filename|>models/count.js<|end_filename|>
const mongoose = require("mongoose");
const countSchema = new mongoose.Schema({
count: {
type: Number,
default: 0
}
});
module.exports = mongoose.model("Count", countSchema);
<|start_filename|>test/app.test.js<|end_filename|>
const request = require("supertest");
const app = require("../app");
const File = require("../models/file");
let fileCount = 0;
beforeAll(() => {
  // return the promise so Jest waits for the count before running the tests
  return File.find({}).then((rFiles) => {
    if (rFiles != null && rFiles.length) {
      fileCount = rFiles.length;
    }
}).catch((err) => {
console.log(err);
});
});
describe("GET / ", ()=>{
it("Should return status code 200", (done)=>{
request(app)
.get("/")
.expect(200)
.end(done);
});
});
describe("GET /count ", ()=>{
it("Should return the number of uploaded file", (done)=>{
request(app)
.get("/count")
.expect(200)
.expect((res)=>{
expect(res.body.data).toBe(fileCount);
})
.end(done);
});
});
describe("POST /upload ", ()=>{
it("Should upload and save the file in db", (done)=>{
request(app)
.post("/upload")
.attach("file",__dirname + "/files/test.txt")
.expect(200)
.end((err, res)=>{
if(err){
return done(err);
}
File.find({}).then((rFiles)=>{
expect(rFiles.length).toBe(fileCount + 1);
done();
}).catch((err)=>{
done(err);
})
});
});
it("Should return 404 if no file uploaded", (done)=>{
request(app)
.post("/upload")
.expect(404)
.end(done);
});
});
<|start_filename|>public/stylesheets/purple.css<|end_filename|>
.solid-color {
background: #192a80
}
.standard-button {
background: #005093
}
.standard-button:hover {
background-color: #00396b;
color: #fff
}
.standard-button2 {
background: #005093
}
.standard-button2:hover {
background: #005093
}
.btn-primary,
.btn-primary:focus,
.btn-primary:active,
.btn-primary.active,
.open .dropdown-toggle.btn-primary {
background: #005093;
border-color: inherit
}
.colored-line,
.colored-line-left {
background: #005093
}
.main-color {
color: #005093
}
.color-bg {
background: #005093;
color: #fff
}
.features .feature .icon-container .icon {
color: #005093
}
.feature-list li i {
color: #005093
}
.services .single-service .service-icon,
.services .single-service h3 {
color: #005093
}
.screenshots .owl-theme .owl-controls .owl-page span {
background: #005093
}
footer .contact-button {
border: 2px solid #005093;
color: #005093
}
footer .contact-button:hover {
background: #005093
}
footer .contact-box .expanded-contact-form .input-box:active,
footer .contact-box .expanded-contact-form .textarea-box:active,
footer .contact-box .expanded-contact-form .input-box:focus,
footer .contact-box .expanded-contact-form .textarea-box:focus {
border: 1px solid #005093
}
footer .social-icons li a:hover {
color: #005093
}
<|start_filename|>models/file.js<|end_filename|>
const mongoose = require("mongoose");
const filesSchema = new mongoose.Schema({
name: {
type: String,
},
size: {
type: Number,
},
path_on_disk: {
type: String,
},
identifier: {
type: String
},
uploaded_date: {
  type: Date,
  // pass the function itself (not its result) so each new document gets its own timestamp
  default: Date.now
},
downloaded: {
type: Number,
default: 0
}
});
module.exports = mongoose.model("File", filesSchema);
<|start_filename|>public/stylesheets/tooltips.css<|end_filename|>
.tooltipped {
position: relative
}
.tooltipped:after {
position: absolute;
z-index: 1000000;
display: none;
padding: 5px 8px;
font: normal normal 11px/1.5 Helvetica, arial, nimbussansl, liberationsans, freesans, clean, sans-serif, "Segoe UI Emoji", "Segoe UI Symbol";
color: #fff;
text-align: center;
text-decoration: none;
text-shadow: none;
text-transform: none;
letter-spacing: normal;
word-wrap: break-word;
white-space: pre;
pointer-events: none;
content: attr(aria-label);
background: rgba(0, 0, 0, .8);
border-radius: 3px;
-webkit-font-smoothing: subpixel-antialiased
}
.tooltipped:before {
position: absolute;
z-index: 1000001;
display: none;
width: 0;
height: 0;
color: rgba(0, 0, 0, .8);
pointer-events: none;
content: "";
border: 5px solid transparent
}
.tooltipped:hover:before,
.tooltipped:hover:after,
.tooltipped:active:before,
.tooltipped:active:after,
.tooltipped:focus:before,
.tooltipped:focus:after {
display: inline-block;
text-decoration: none
}
.tooltipped-multiline:hover:after,
.tooltipped-multiline:active:after,
.tooltipped-multiline:focus:after {
display: table-cell
}
.tooltipped-s:after,
.tooltipped-se:after,
.tooltipped-sw:after {
top: 100%;
right: 50%;
margin-top: 5px
}
.tooltipped-s:before,
.tooltipped-se:before,
.tooltipped-sw:before {
top: auto;
right: 50%;
bottom: -5px;
margin-right: -5px;
border-bottom-color: rgba(0, 0, 0, .8)
}
.tooltipped-se:after {
right: auto;
left: 50%;
margin-left: -15px
}
.tooltipped-sw:after {
margin-right: -15px
}
.tooltipped-n:after,
.tooltipped-ne:after,
.tooltipped-nw:after {
right: 50%;
bottom: 100%;
margin-bottom: 5px
}
.tooltipped-n:before,
.tooltipped-ne:before,
.tooltipped-nw:before {
top: -5px;
right: 50%;
bottom: auto;
margin-right: -5px;
border-top-color: rgba(0, 0, 0, .8)
}
.tooltipped-ne:after {
right: auto;
left: 50%;
margin-left: -15px
}
.tooltipped-nw:after {
margin-right: -15px
}
.tooltipped-s:after,
.tooltipped-n:after {
-webkit-transform: translateX(50%);
-ms-transform: translateX(50%);
transform: translateX(50%)
}
.tooltipped-w:after {
right: 100%;
bottom: 50%;
margin-right: 5px;
-webkit-transform: translateY(50%);
-ms-transform: translateY(50%);
transform: translateY(50%)
}
.tooltipped-w:before {
top: 50%;
bottom: 50%;
left: -5px;
margin-top: -5px;
border-left-color: rgba(0, 0, 0, .8)
}
.tooltipped-e:after {
bottom: 50%;
left: 100%;
margin-left: 5px;
-webkit-transform: translateY(50%);
-ms-transform: translateY(50%);
transform: translateY(50%)
}
.tooltipped-e:before {
top: 50%;
right: -5px;
bottom: 50%;
margin-top: -5px;
border-right-color: rgba(0, 0, 0, .8)
}
.tooltipped-multiline:after {
width: -webkit-max-content;
width: -moz-max-content;
width: max-content;
max-width: 250px;
word-break: break-word;
word-wrap: normal;
white-space: pre-line;
border-collapse: separate
}
.tooltipped-multiline.tooltipped-s:after,
.tooltipped-multiline.tooltipped-n:after {
right: auto;
left: 50%;
-webkit-transform: translateX(-50%);
-ms-transform: translateX(-50%);
transform: translateX(-50%)
}
.tooltipped-multiline.tooltipped-w:after,
.tooltipped-multiline.tooltipped-e:after {
right: 100%
}
@media screen and (min-width:0\0) {
.tooltipped-multiline:after {
width: 250px
}
}
.tooltipped-sticky:before,
.tooltipped-sticky:after {
display: inline-block
}
.tooltipped-sticky.tooltipped-multiline:after {
display: table-cell
}
.fullscreen-overlay-enabled.dark-theme .tooltipped:after {
color: #000;
background: rgba(255, 255, 255, .8)
}
.fullscreen-overlay-enabled.dark-theme .tooltipped .tooltipped-s:before,
.fullscreen-overlay-enabled.dark-theme .tooltipped .tooltipped-se:before,
.fullscreen-overlay-enabled.dark-theme .tooltipped .tooltipped-sw:before {
border-bottom-color: rgba(255, 255, 255, .8)
}
.fullscreen-overlay-enabled.dark-theme .tooltipped.tooltipped-n:before,
.fullscreen-overlay-enabled.dark-theme .tooltipped.tooltipped-ne:before,
.fullscreen-overlay-enabled.dark-theme .tooltipped.tooltipped-nw:before {
border-top-color: rgba(255, 255, 255, .8)
}
.fullscreen-overlay-enabled.dark-theme .tooltipped.tooltipped-e:before {
border-right-color: rgba(255, 255, 255, .8)
}
.fullscreen-overlay-enabled.dark-theme .tooltipped.tooltipped-w:before {
border-left-color: rgba(255, 255, 255, .8)
}
<|start_filename|>seedb.js<|end_filename|>
const Count = require("./models/count");
// Create a Count document
function seedDb(){
Count.create({ count: 0 }).then((rCount)=>{
console.log("Initial Count : ", rCount);
});
}
module.exports = seedDb;
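// app.js calls this when no Count document exists yet (see the "/" route).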
<|start_filename|>app.js<|end_filename|>
const express = require("express");
const app = express();
const mongoose = require("mongoose");
const multer = require('multer');
const File = require("./models/file");
const Count = require("./models/count");
const config = require("./config");
const crypto = require('crypto');
const mime = require("mime-types");
const path = require('path');
const initialCount = require("./seedb");
const upload = multer({
storage: multer.diskStorage({
destination: path.join(__dirname, 'files/'),
filename: function (req, file, cb) {
crypto.pseudoRandomBytes(4, function (err, raw) {
const mime_type = mime.lookup(file.originalname);
// throw away any extension if provided
const nameSplit = file.originalname.split(".").slice(0,-1);
//nameSplit.pop();
// replace all white spaces with - for safe file name on different filesystem
const name = nameSplit.join(".").replace(/\s/g,'-');
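// e.g. "my report.final.pdf" -> name "my-report.final",
// stored on disk as "<8 hex chars>my-report.final.pdf"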
cb(null, raw.toString('hex') + name + '.' + mime.extension(mime_type));
});
}
})
});
// configure app and mongoose
mongoose.Promise = global.Promise;
app.set("view engine", "ejs");
// app.set("port", process.env.PORT || 7000);
// middlewares
app.use(express.static(__dirname + "/public"));
// routes
app.get("/", (req, res) => {
File.find({}).then((rFiles) => {
let templateData = { uploadTime: 0, dTime: 0 };
if (rFiles != null && rFiles.length) {
templateData.uploadTime = rFiles.length;
}
Count.find({}).then((rCount) => {
if (rCount != null && rCount.length) {
templateData.dTime = rCount[0].count;
}else{
// Initialize db count
initialCount();
}
res.render("index", templateData);
});
}).catch(() => {
  // avoid an infinite loop: "/" must not redirect to itself on a DB error
  res.sendStatus(500);
});
});
// Ajax upload count
app.get("/count", (req, res) => {
const result = { error: "", data: "" };
File.find({}).then((rFiles) => {
result.data = rFiles != null && rFiles.length ? rFiles.length : 0;
res.send(result);
}).catch((err) => {
result.error = err;
res.send(result);
});
});
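// Example response (hypothetical count): { "error": "", "data": 42 }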
app.get("/:id", (req, res) => {
File.findOne({ identifier: req.params.id }).then((rFile) => {
if (rFile == null ) {
return res.sendStatus(404);
}
rFile.downloaded++;
rFile.save();
Count.find({}).then((rCount) => {
if (rCount != null && rCount.length) {
rCount[0].count++;
rCount[0].save();
}
});
const file = rFile.path_on_disk;
res.download(file);
}).catch(() => {
res.redirect("/");
});
});
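// Example (shell, hypothetical identifier): download a file by its identifier;
// -OJ saves it under the name from the Content-Disposition header set by res.download():
//   curl -OJ http://localhost:7000/k3x9q2hfa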
app.post("/upload", upload.single("file"), (req, res) => {
if (req.file) {
const identifier = Math.random().toString(36).slice(2);
const data = {
url: identifier,
name: req.file.originalname,
encoding: req.file.encoding,
mimetype: req.file.mimetype,
size: req.file.size
};
const file = {
name: data.name,
size: data.size,
path_on_disk: req.file.path,
identifier: identifier,
};
File.create(file).then((rFile) => {
return res.status(200).send(data);
});
}else{
res.status(404).send();
}
});
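// Example (shell, hypothetical file): upload via multipart form data; the field
// name "file" must match upload.single("file") above:
//   curl -F "file=@test.txt" http://localhost:7000/upload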
// boot if db is available
mongoose.connect(config.dbURL, { reconnectTries: 5 })
.then(db => {
// boot
app.listen(config.port, () => {
console.log("Listening on port: ", config.port);
});
})
.catch(dbErr => {
console.log("DB Connection Error: ", dbErr.message);
process.exit(1);
});
process.once('unhandledRejection',err => {
console.log('UNHANDLED_REJECTION: ', err.stack.toString());
process.exit(1);
});
process.once('uncaughtException',err => {
console.log('UNHANDLED_EXCEPTION: ', err.stack.toString());
process.exit(1);
});
module.exports = app;
<|start_filename|>public/js/tooltips5c33.js<|end_filename|>
var btns = document.querySelectorAll('.copy');
for (var i = 0; i < btns.length; i++) {
btns[i].addEventListener('mouseleave', function(e) {
e.currentTarget.setAttribute('class', 'copy');
e.currentTarget.removeAttribute('aria-label');
});
}
function showTooltip(elem, msg) {
elem.setAttribute('class', elem.className + ' tooltipped tooltipped-s');
elem.setAttribute('aria-label', msg);
}
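// Example (hypothetical element/message):
//   showTooltip(document.querySelector('.copy'), 'Copied!');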
function fallbackMessage(action) {
var actionMsg = '';
var actionKey = (action === 'cut' ? 'X' : 'C');
if (/iPhone|iPad/i.test(navigator.userAgent)) {
actionMsg = 'No support :(';
} else if (/Mac/i.test(navigator.userAgent)) {
actionMsg = 'Press ⌘-' + actionKey + ' to ' + action;
} else {
actionMsg = 'Press Ctrl-' + actionKey + ' to ' + action;
}
return actionMsg;
}
<|start_filename|>test/mocks/mockClusters.js<|end_filename|>
/*
Mock clusters to use in ClusterList view when you don't have full
/src/cc/store/Cluster objects to test with.
For example, set `mockClusters` as the `ClusterList.defaultProps.clusters` value
and then don't provide any clusters to the component's instance.
Or try this in ClusterDataProvider's `_loadData()` method:
```
const data = await mockAsyncClusters();
pr.store.data.namespaces = data.namespaces;
pr.store.data.clusters = data.clusters;
pr.loading = false;
pr.loaded = true;
pr.notifyIfError();
pr.onChange();
return;
```
*/
import { mockNamespaces } from './mockNamespaces';
export const mockClusters = [
{
id: '0',
namespace: mockNamespaces[0].name,
name: 'cluster-0',
ready: true,
deleteInProgress: false,
},
{
id: '1',
namespace: mockNamespaces[0].name,
name: 'cluster-1',
ready: true,
deleteInProgress: false,
},
{
id: '2',
namespace: mockNamespaces[0].name,
name: 'cluster-2',
ready: true,
deleteInProgress: false,
},
{
id: '3',
namespace: mockNamespaces[0].name,
name: 'cluster-3',
ready: true,
deleteInProgress: false,
},
{
id: '4',
namespace: mockNamespaces[0].name,
name: 'cluster-4-not-ready',
ready: false,
deleteInProgress: false,
},
{
id: '5',
namespace: mockNamespaces[1].name,
name: 'cluster-5',
ready: true,
deleteInProgress: false,
},
{
id: '6',
namespace: mockNamespaces[1].name,
name: 'cluster-6-not-ready',
ready: false,
deleteInProgress: false,
},
{
id: '7',
namespace: mockNamespaces[1].name,
name: 'cluster-7',
ready: true,
deleteInProgress: false,
},
{
id: '8',
namespace: mockNamespaces[1].name,
name: 'cluster-8',
ready: true,
deleteInProgress: false,
},
{
id: '9',
namespace: mockNamespaces[1].name,
name: 'cluster-9',
ready: true,
deleteInProgress: false,
},
{
id: '10',
namespace: mockNamespaces[2].name,
name: 'cluster-10-not-ready',
ready: false,
deleteInProgress: false,
},
{
id: '11',
namespace: mockNamespaces[2].name,
name: 'cluster-11',
ready: true,
deleteInProgress: false,
},
{
id: '12',
namespace: mockNamespaces[2].name,
name: 'cluster-12',
ready: true,
deleteInProgress: false,
},
{
id: '13',
namespace: mockNamespaces[2].name,
name: 'cluster-13',
ready: true,
deleteInProgress: false,
},
{
id: '14',
namespace: mockNamespaces[2].name,
name: 'cluster-14',
ready: true,
deleteInProgress: false,
},
];
/**
* [ASYNC]
* Returns `mockClusters` after a short delay to simulate a request.
* @returns {Promise<Array<Object>>} `mockClusters`.
*/
export const mockAsyncClusters = function () {
return new Promise(function (resolve) {
setTimeout(function () {
resolve({ clusters: mockClusters, namespaces: mockNamespaces });
}, 500);
});
};
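// Example: const { clusters, namespaces } = await mockAsyncClusters();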
<|start_filename|>src/renderer/store/ExtStateProvider.js<|end_filename|>
//
// Extension State Management (stores in Local Storage)
//
import { createContext, useContext, useState, useMemo } from 'react';
import propTypes from 'prop-types';
import * as rtv from 'rtvjs';
import { cloneDeep, cloneDeepWith } from 'lodash';
import { PreferenceStore, prefStore } from '../../store/PreferenceStore';
import { AuthAccess } from '../auth/AuthAccess';
import { ProviderStore } from './ProviderStore';
import ExtensionRenderer from '../renderer';
const extStateTs = {
prefs: [rtv.EXPECTED, rtv.CLASS_OBJECT, { ctor: PreferenceStore }],
authAccess: [rtv.EXPECTED, rtv.CLASS_OBJECT, { ctor: AuthAccess }],
};
let extension; // {ExtensionRenderer} instance reference
let extFileFolderLoading = false; // true if we're waiting for the file folder to load async
//
// Store
//
class ExtStateProviderStore extends ProviderStore {
constructor() {
super();
prefStore.addUpdateHandler(this.onStoreUpdate.bind(this));
}
// @override
makeNew() {
const newStore = {
...super.makeNew(),
prefs: prefStore, // singleton instance
authAccess: new AuthAccess(),
};
newStore.authAccess.username = prefStore.username || null;
newStore.loaded = true; // always
return newStore;
}
// @override
reset() {
this.store.prefs.reset();
super.reset();
}
// @override
clone() {
return cloneDeepWith(this.store, (value, key) => {
if (key === 'authAccess') {
// instead of letting Lodash dig deep into this object, clone it manually
return new AuthAccess(cloneDeep(value.toJSON()));
} else if (key === 'prefs') {
return value; // it's a singleton instance so just return the instance
}
// else, let Lodash do the cloning
});
}
// @override
validate() {
const result = rtv.check({ state: this.store }, { state: extStateTs });
if (!result.valid) {
throw new Error(
`[ExtStateProvider] Invalid extension state, error="${result.message}"`
);
}
}
// called whenever the pref store is updated from disk
onStoreUpdate() {
this.store.authAccess.username = this.store.prefs.username;
}
}
const pr = new ExtStateProviderStore();
//
// Internal Methods
//
//
// Provider Definition
//
const ExtStateContext = createContext();
export const useExtState = function () {
const context = useContext(ExtStateContext);
if (!context) {
throw new Error('useExtState must be used within an ExtStateProvider');
}
// NOTE: `context` is the value of the `value` prop we set on the
// <ExtStateContext.Provider value={...}/> we return as the <ExtStateProvider/>
// component to wrap all children that should have access to the state (i.e.
// all the children that will be able to `useExtState()` to access the state)
const [state] = context;
// this is what you actually get from `useExtState()` when you consume it
return {
state,
//// ACTIONS
actions: {
/** Reset the state, forgetting all data. */
reset() {
pr.reset();
},
/**
* Sets a new AuthAccess object into the store, which implicitly updates
* the `username` in the store based on `newValue.username`.
* @param {AuthAccess|null} newValue
*/
setAuthAccess(newValue) {
pr.store.authAccess = newValue;
pr.store.prefs.username = newValue ? newValue.username : null;
if (pr.store.authAccess) {
// mark it as no longer being changed if it was
pr.store.authAccess.changed = false;
}
pr.onChange();
},
/**
* Updates the MCC base URL.
* @param {string} newValue Must not end with a slash.
*/
setCloudUrl(newValue) {
pr.store.prefs.cloudUrl = newValue;
pr.onChange();
},
/**
* Updates the save path where kubeConfig files should be saved on the local system.
* @param {string} newValue Must not end with a slash.
*/
setSavePath(newValue) {
pr.store.prefs.savePath = newValue;
pr.onChange();
},
/**
* Updates the offline option when generating cluster access tokens.
* @param {boolean} newValue
*/
setOffline(newValue) {
pr.store.prefs.offline = newValue;
pr.onChange();
},
},
};
};
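// Example (hypothetical component code) of consuming the hook from within an
// <ExtStateProvider/> subtree:
//   const { state, actions } = useExtState();
//   actions.setCloudUrl('https://container-cloud.example.com'); // no trailing slash
//   console.log(state.prefs.cloudUrl);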
export const ExtStateProvider = function ({
extension: lensExtension,
...props
}) {
extension = lensExtension;
// attempt to load the special data directory path that Lens consistently assigns
// to this extension on every load (should always be the same one as long as the
// extension remains installed)
if (!PreferenceStore.defaultSavePath && !extFileFolderLoading) {
extFileFolderLoading = true;
extension
.getExtensionFileFolder()
.then((folder) => {
PreferenceStore.defaultSavePath = folder;
})
.catch(() => {
// use the extension's installation directory as a fallback, though
// this is not safe because if the extension is uninstalled, this
// directory is removed by Lens, and would result in any Kubeconfig
// files also being deleted, and therefore clusters lost in Lens
PreferenceStore.defaultSavePath = __dirname;
})
.finally(() => {
extFileFolderLoading = false;
// only use default if we didn't get a path when we loaded the pref store
if (!pr.store.prefs.savePath) {
pr.store.prefs.savePath = PreferenceStore.defaultSavePath;
pr.onChange();
}
});
}
// NOTE: since the state is passed directly (by reference) into the context
// returned by the provider, even the initial state should be a clone of the
// `store` so that we consistently return a `state` property (in the context)
// that is a shallow clone of the `store`
const [state, setState] = useState(pr.clone());
const value = useMemo(() => [state, setState], [state]);
pr.setState = setState;
return <ExtStateContext.Provider value={value} {...props} />;
};
ExtStateProvider.propTypes = {
extension: propTypes.instanceOf(ExtensionRenderer).isRequired,
};
<|start_filename|>src/renderer/components/ErrorPanel.js<|end_filename|>
//
// Error Message
//
import propTypes from 'prop-types';
import styled from '@emotion/styled';
import { Renderer } from '@k8slens/extensions';
import { layout } from './styles';
const { Component } = Renderer;
const Error = styled.p(function () {
return {
marginTop: 2, // to center with icon
marginLeft: layout.pad,
};
});
const Panel = styled.div(function () {
return {
display: 'flex',
alignItems: 'flex-start', // make sure all content visible if needs scrolling
backgroundColor: 'var(--colorError)',
borderColor: 'var(--colorSoftError)',
borderWidth: 1,
borderStyle: 'solid',
borderRadius: layout.grid,
color: 'white',
padding: layout.pad,
maxHeight: 100,
overflow: 'auto',
};
});
export const ErrorPanel = function ({ children }) {
return (
<Panel>
<Component.Icon material="error_outline" />
<Error>{children}</Error>
</Panel>
);
};
ErrorPanel.propTypes = {
// zero or more child nodes
children: propTypes.oneOfType([
propTypes.arrayOf(propTypes.node),
propTypes.node,
]),
};
<|start_filename|>src/renderer/components/Loader.js<|end_filename|>
//
// A Lens Spinner with a message.
//
import propTypes from 'prop-types';
import styled from '@emotion/styled';
import { Renderer } from '@k8slens/extensions';
import { layout } from './styles';
const { Component } = Renderer;
const Wrapper = styled.div(function () {
return {
display: 'flex',
alignItems: 'center',
p: {
marginLeft: layout.pad,
},
};
});
export const Loader = function ({ message, ...spinProps }) {
const spinner = <Component.Spinner {...spinProps} />;
const props =
message && message.includes('<')
? { dangerouslySetInnerHTML: { __html: message } }
: undefined;
return message ? (
<Wrapper>
{spinner}
<p {...props}>{props ? undefined : message}</p>
</Wrapper>
) : (
spinner
);
};
Loader.propTypes = {
message: propTypes.string, // can contain HTML, in which case it's DANGEROUSLY rendered
singleColor: propTypes.bool,
center: propTypes.bool,
};
Loader.defaultProps = {
singleColor: true,
center: false,
};
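// Example (hypothetical): plain-text vs. HTML messages; a message containing '<'
// is rendered via dangerouslySetInnerHTML:
//   <Loader message="Loading clusters..." />
//   <Loader message="Almost done<br/>hang tight" />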
<|start_filename|>src/renderer/components/ClusterPage/ClusterView.js<|end_filename|>
//
// Main view for the ClusterPage
//
import styled from '@emotion/styled';
import { Renderer } from '@k8slens/extensions';
import * as rtv from 'rtvjs';
import * as strings from '../../../strings';
import * as consts from '../../../constants';
import { layout, mixinPageStyles } from '../styles';
import { logger } from '../../../util/logger';
import { clusterEntityTs } from '../../../typesets';
const { Component } = Renderer;
//
// INTERNAL STYLED COMPONENTS
//
const ClusterInfo = styled.ul(() => ({
marginTop: 0,
listStyle: 'none',
'> li': {
display: 'flex',
alignItems: 'center',
marginTop: layout.pad,
'&:first-child': {
marginTop: 0,
},
'> a': {
display: 'flex',
alignItems: 'center',
// icon
i: {
marginRight: layout.grid,
},
},
},
}));
const PagePanel = styled.div(() => ({
marginTop: layout.gap,
marginBottom: layout.gap,
padding: layout.gap,
backgroundColor: 'var(--contentColor)',
width: '100%',
'&:first-child': {
marginTop: 0,
},
'&:last-child': {
marginBottom: 0,
},
'> h3': {
marginBottom: layout.gap,
},
}));
const PageContainer = styled.div(() => ({
...mixinPageStyles(),
display: 'flex',
flexDirection: 'column',
}));
//
// MAIN COMPONENT
//
export const ClusterView = function () {
const { activeEntity: clusterEntity } = Renderer.Catalog.catalogEntities;
if (
!clusterEntity ||
clusterEntity.metadata.source !== consts.catalog.source
) {
// this shouldn't happen, because this cluster page shouldn't be accessible
// as a menu item unless the Catalog has an active entity, and it's an MCC
// cluster (thanks to code in renderer.tsx) HOWEVER, Lens 5.2 has a lot of bugs
// around entity activation, so this is covering us just in case
logger.error(
'ClusterView.render()',
`Unable to render: Active Catalog entity ${
clusterEntity
? `is not from source "${consts.catalog.source}"`
: 'unknown'
}`
);
return null;
}
DEV_ENV && rtv.verify(clusterEntity, clusterEntityTs);
//
// STATE
//
//
// EVENTS
//
//
// EFFECTS
//
//
// RENDER
//
const browserUrl = `${clusterEntity.metadata.cloudUrl}/projects/${clusterEntity.metadata.namespace}/clusters/${clusterEntity.metadata.name}`;
return (
<PageContainer>
<h2>{strings.clusterPage.title()}</h2>
<PagePanel>
<ClusterInfo>
<li>
<a href={browserUrl} target="_blank" rel="noreferrer">
<Component.Icon material="open_in_new" />
<span>{strings.clusterView.infoPanel.viewInBrowser()}</span>
</a>
</li>
</ClusterInfo>
</PagePanel>
</PageContainer>
);
};
<|start_filename|>src/store/ClusterStore.js<|end_filename|>
//
// Cluster storage to persist clusters added to the Catalog by this extension
//
import { observable, toJS, makeObservable } from 'mobx';
import { Common } from '@k8slens/extensions';
import * as rtv from 'rtvjs';
import { logger } from '../util/logger';
import { clusterModelTs } from '../typesets';
/** RTV.js typeset for the cluster store model. */
export const storeTs = {
/** List of models representing each cluster in the Catalog added by this extension. */
models: [[clusterModelTs]],
};
/** Cluster models auto-persisted by Lens. Singleton. Use `getInstance()` static method. */
export class ClusterStore extends Common.Store.ExtensionStore {
// NOTE: See main.ts#onActivate() and renderer.tsx#onActivate() where this.loadExtension()
// is called on the store instance in order to get Lens to load it from storage.
static getDefaults() {
return {
models: [],
};
}
/**
* List of onUpdate handlers to be called whenever this store gets updated from disk.
* @type {Array<Function>}
*/
updateHandlers = [];
/**
* [Stored]
* @property {Array<ClusterModel>} models List of models representing each cluster in
* the Catalog added by this extension.
*/
@observable models;
constructor() {
super({
configName: 'cluster-store',
defaults: ClusterStore.getDefaults(),
});
makeObservable(this);
}
/** Reset the store to its default values. */
reset() {
const defaults = ClusterStore.getDefaults();
Object.keys(this).forEach((key) => (this[key] = defaults[key]));
}
fromStore(store) {
const result = rtv.check({ store }, { store: storeTs });
if (!result.valid) {
logger.error(
'ClusterStore.fromStore()',
`Invalid data found, error="${result.message}"`
);
return;
}
Object.keys(store).forEach((key) => (this[key] = store[key]));
// call any onUpdate() handlers
this.updateHandlers.forEach((h) => h());
}
toJSON() {
// throw-away: just to get keys we care about on this
const defaults = ClusterStore.getDefaults();
const observableThis = Object.keys(defaults).reduce((obj, key) => {
obj[key] = this[key];
return obj;
}, {});
// return a deep-clone that is no longer observable
return toJS(observableThis);
}
/**
* Adds an onUpdate() handler if it hasn't already been added. This handler
* will be called whenever this store is updated from disk.
* @param {Function} handler
*/
addUpdateHandler(handler) {
if (!this.updateHandlers.find((h) => h === handler)) {
this.updateHandlers.push(handler);
}
}
/**
* Removes an onUpdate() handler if it's currently in the list.
* @param {Function} handler
*/
removeUpdateHandler(handler) {
const idx = this.updateHandlers.findIndex((h) => h === handler);
if (idx >= 0) {
this.updateHandlers.splice(idx, 1);
}
}
}
// create singleton instance, and export it for convenience (otherwise, one can also
// import the exported ClusterStore class and call ClusterStore.getInstance())
export const clusterStore = ClusterStore.createInstance();
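// Example (hypothetical) of reacting to on-disk store updates:
//   const onUpdate = () => console.log('models:', clusterStore.models.length);
//   clusterStore.addUpdateHandler(onUpdate);
//   // ...later, when no longer needed:
//   clusterStore.removeUpdateHandler(onUpdate);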
<|start_filename|>src/renderer/IpcRenderer.js<|end_filename|>
//
// Renderer Process IPC API
//
import { Renderer } from '@k8slens/extensions';
import { logger } from '../util/logger';
import { ipcEvents } from '../constants';
export class IpcRenderer extends Renderer.Ipc {
//
// HANDLERS
//
/**
* Invoke a call on `logger` in the Renderer process.
* @param {string} level One of the logger/console methods, e.g. 'log', 'info', 'error'.
* @param {...any} args Anything else to pass to the Logger to be printed as data
* or parsed with '%s' placeholders in the message (same as `console` API).
*/
onLogger = (event, level, ...args) => {
// NOTE: while this does make it easier to get logger messages from the Main
// process, any modification to the Main process will require Lens to be
// completely restarted (more than just reloaded with CMD+R) in order to
// take effect
logger[level](...args);
};
//
// SINGLETON
//
constructor(extension) {
super(extension);
this.listen(ipcEvents.broadcast.LOGGER, this.onLogger);
}
}
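// EXAMPLE (illustrative sketch): the Main process side would broadcast the
// LOGGER event that `onLogger` handles above; assuming a matching `IpcMain`
// wrapper exists, that might look like:
//
//   IpcMain.getInstance().broadcast(
//     ipcEvents.broadcast.LOGGER,
//     'info', // logger method to call
//     'main/ClusterManager', // hypothetical context string
//     'added %s cluster(s)', // message with console-style placeholder
//     count
//   );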
<|start_filename|>src/renderer/components/GlobalPage/AddClusters.js<|end_filename|>
//
// Add Clusters Panel
//
import propTypes from 'prop-types';
import styled from '@emotion/styled';
import { Renderer } from '@k8slens/extensions';
import { Cluster } from '../../store/Cluster';
import { useClusterActions } from '../../store/ClusterActionsProvider';
import { Section as BaseSection } from '../Section';
import { layout } from '../styles';
import { InlineNotice } from '../InlineNotice';
import * as strings from '../../../strings';
const { Component } = Renderer;
const Section = styled(BaseSection)(function () {
return {
small: {
marginTop: -(layout.gap - layout.grid),
},
};
});
export const AddClusters = function ({ onAdd, clusters }) {
//
// STATE
//
const {
state: { loading: addClustersLoading, ssoAddClustersInProgress },
actions: clusterActions,
} = useClusterActions();
//
// EVENTS
//
const handleAddClick = function () {
if (typeof onAdd === 'function') {
onAdd();
}
};
const handleSsoCancelClick = function () {
clusterActions.ssoCancelAddClusters();
};
//
// RENDER
//
return (
<Section className="lecc-AddClusters">
<h3>{strings.addClusters.title()}</h3>
<div>
<Component.Button
primary
disabled={clusters.length <= 0 || addClustersLoading}
label={strings.addClusters.action.label()}
waiting={addClustersLoading}
tooltip={
clusters.length <= 0
? strings.addClusters.action.disabledTip()
: undefined
}
onClick={handleAddClick}
/>
</div>
{addClustersLoading && ssoAddClustersInProgress && (
<>
<InlineNotice>
<p
dangerouslySetInnerHTML={{
__html: strings.addClusters.sso.messageHtml(),
}}
/>
</InlineNotice>
<div>
<Component.Button
primary
label={strings.addClusters.action.ssoCancel()}
onClick={handleSsoCancelClick}
/>
</div>
</>
)}
</Section>
);
};
AddClusters.propTypes = {
clusters: propTypes.arrayOf(propTypes.instanceOf(Cluster)),
onAdd: propTypes.func, // signature: () => void
};
AddClusters.defaultProps = {
clusters: [],
};
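// EXAMPLE (illustrative sketch): a parent panel might render this component
// with the currently-selected clusters and start the add flow on click:
//
//   <AddClusters
//     clusters={selectedClusters}
//     onAdd={() =>
//       clusterActions.addClusters({ clusters: selectedClusters, config })
//     }
//   />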
<|start_filename|>src/renderer/components/InfoPanel.js<|end_filename|>
//
// Info Message
//
import propTypes from 'prop-types';
import styled from '@emotion/styled';
import { Renderer } from '@k8slens/extensions';
import { layout } from './styles';
const { Component } = Renderer;
const Info = styled.p(function () {
return {
marginTop: 2, // to center with icon
marginLeft: layout.pad,
};
});
const Panel = styled.div(function () {
return {
display: 'flex',
alignItems: 'flex-start', // make sure all content visible if needs scrolling
backgroundColor: 'var(--colorInfo)',
borderColor: 'transparent',
borderWidth: 1,
borderStyle: 'solid',
borderRadius: layout.grid,
color: 'white',
padding: layout.pad,
maxHeight: 100,
overflow: 'auto',
};
});
export const InfoPanel = function ({ children }) {
return (
<Panel>
<Component.Icon material="info_outline" />
<Info>{children}</Info>
</Panel>
);
};
InfoPanel.propTypes = {
// zero or more child nodes
children: propTypes.oneOfType([
propTypes.arrayOf(propTypes.node),
propTypes.node,
]),
};
<|start_filename|>src/store/PreferenceStore.js<|end_filename|>
//
// Preferences management that uses a Lens Store for persistence
//
import { observable, toJS, makeObservable } from 'mobx';
import { Common } from '@k8slens/extensions';
import * as rtv from 'rtvjs';
import { logger } from '../util/logger';
/** RTV.js typeset for preferences model. */
export const storeTs = {
/** MCC instance URL, does NOT end with a slash. */
cloudUrl: [
rtv.EXPECTED,
rtv.STRING,
(v) => {
if (v && v.match(/\/$/)) {
throw new Error('cloudUrl must not end with a slash');
}
},
],
/** Username used for authentication purposes to the MCC instance. */
username: [rtv.EXPECTED, rtv.STRING],
/** Absolute path where kubeconfigs are to be saved. */
savePath: [rtv.EXPECTED, rtv.STRING],
/**
* If true, the refresh token generated for the clusters will be enabled for
* offline access. WARNING: This is less secure than a normal refresh token as
* it will never expire.
*/
offline: [rtv.EXPECTED, rtv.BOOLEAN],
};
/** Preferences auto-persisted by Lens. Singleton. Use `getInstance()` static method. */
export class PreferenceStore extends Common.Store.ExtensionStore {
// NOTE: See main.ts#onActivate() and renderer.tsx#onActivate() where this.loadExtension()
// is called on the store instance in order to get Lens to load it from storage.
// ultimately, we try to set this to the getExtensionFileFolder() directory that
// Lens gives the extension, but we don't know what it is until later
static defaultSavePath = null;
static getDefaults() {
return {
cloudUrl: null,
username: null,
savePath: PreferenceStore.defaultSavePath,
offline: false,
};
}
/**
* List of onUpdate handlers to be called whenever this store gets updated from disk.
* @type {Array<Function>}
*/
updateHandlers = [];
/**
* [Stored]
* @property {string|null} cloudUrl URL to the MCC instance.
*/
@observable cloudUrl;
/**
* [Stored]
* @property {string|null} username Username used to log into the MCC instance.
*/
@observable username;
/**
* [Stored]
* @property {string|null} savePath Absolute path on the local disk where kubeConfig
* files should be saved.
*/
@observable savePath;
/**
* [Stored]
* @property {boolean} offline True if kubeConfigs generated by this extension should
* use offline tokens; false if not.
*/
@observable offline;
constructor() {
super({
configName: 'preference-store',
defaults: PreferenceStore.getDefaults(),
});
makeObservable(this);
}
/** Reset all preferences to their default values. */
reset() {
const defaults = PreferenceStore.getDefaults();
// NOTE: only reset the stored keys so that non-stored properties like
// `updateHandlers` aren't clobbered with `undefined`
Object.keys(defaults).forEach((key) => (this[key] = defaults[key]));
}
fromStore(store) {
const result = rtv.check({ store }, { store: storeTs });
if (!result.valid) {
logger.error(
'PreferenceStore.fromStore()',
`Invalid data found, error="${result.message}"`
);
return;
}
Object.keys(store).forEach((key) => (this[key] = store[key]));
// call any onUpdate() handlers
this.updateHandlers.forEach((h) => h());
}
toJSON() {
// throw-away: just to get keys we care about on this
const defaults = PreferenceStore.getDefaults();
const observableThis = Object.keys(defaults).reduce((obj, key) => {
obj[key] = this[key];
return obj;
}, {});
// return a deep-clone that is no longer observable
return toJS(observableThis);
}
/**
* Adds an onUpdate() handler if it hasn't already been added. This handler
* will be called whenever this store is updated from disk.
* @param {Function} handler
*/
addUpdateHandler(handler) {
if (!this.updateHandlers.find((h) => h === handler)) {
this.updateHandlers.push(handler);
}
}
/**
* Removes an onUpdate() handler if it's currently in the list.
* @param {Function} handler
*/
removeUpdateHandler(handler) {
const idx = this.updateHandlers.findIndex((h) => h === handler);
if (idx >= 0) {
this.updateHandlers.splice(idx, 1);
}
}
}
// create singleton instance, and export it for convenience (otherwise, one can also
// import the exported PreferenceStore class and call PreferenceStore.getInstance())
export const prefStore = PreferenceStore.createInstance();
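// EXAMPLE (illustrative sketch): since the stored properties are observable
// and auto-persisted by Lens, a consumer can simply read/write them on the
// singleton (the URL below is made-up):
//
//   import { prefStore } from './store/PreferenceStore';
//
//   prefStore.cloudUrl = 'https://container-cloud.example.com';
//   prefStore.offline = false;
//   logger.log('example', `saving kubeConfigs to ${prefStore.savePath}`);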
<|start_filename|>src/renderer/hooks/useClusterLoader.js<|end_filename|>
import { useEffect } from 'react';
import { useExtState } from '../store/ExtStateProvider';
import { useConfig } from '../store/ConfigProvider';
import { useSsoAuth } from '../store/SsoAuthProvider';
import { useClusterData } from '../store/ClusterDataProvider';
import { logger } from '../../util/logger';
/**
* Custom hook that loads the MCC config, authenticates with the instance, and
* loads all available clusters.
* @param {string} [activeEventType] If set, the type of extension event
* (from the `eventBus`) that is currently being handled; otherwise, the
* extension is assumed to be in its 'normal' state.
* @param {Array<string>} [onlyNamespaces] If set, when cluster data is loaded, only
* clusters in these namespaces will be considered; otherwise, all clusters in
* all namespaces will be considered.
*/
export const useClusterLoader = function (
activeEventType = null,
onlyNamespaces = null
) {
//
// STATE
//
const {
state: {
authAccess,
prefs: { cloudUrl },
},
actions: extActions,
} = useExtState();
const {
state: {
loading: configLoading,
loaded: configLoaded,
error: configError,
config,
},
} = useConfig();
const {
state: {
loading: ssoAuthLoading,
loaded: ssoAuthLoaded,
error: ssoAuthError,
},
actions: ssoAuthActions,
} = useSsoAuth();
const {
state: {
loading: clusterDataLoading,
loaded: clusterDataLoaded,
error: clusterDataError,
},
actions: clusterDataActions,
} = useClusterData();
//
// EFFECTS
//
useEffect(
function () {
if (authAccess.changed) {
// capture any changes after authenticating (step 1) or loading clusters (step 2)
extActions.setAuthAccess(authAccess);
}
},
[authAccess, extActions]
);
useEffect(
function () {
if (DEV_ENV) {
logger.log(
'hooks/useClusterLoader',
`=== CONFIG configLoading=${configLoading}, configLoaded=${configLoaded}, configError=${
configError ? `"${configError}"` : '<none>'
}`,
{ config }
);
}
},
[config, configLoading, configLoaded, configError]
);
// 1. Authenticate with SSO auth IFF the config says the MCC instance uses SSO
useEffect(
function () {
if (DEV_ENV) {
logger.log(
'hooks/useClusterLoader#ssoAuth',
`=== config=${
config ? '<set>' : '<none>'
}, ssoAuthLoading=${ssoAuthLoading}, ssoAuthLoaded=${ssoAuthLoaded}, ssoAuthError=${
ssoAuthError ? `"${ssoAuthError}"` : '<none>'
}, activeEventType=${activeEventType}, authAccess.isValid()=${authAccess.isValid()}`,
{ cloudUrl, config, authAccess, ssoAuthLoading, ssoAuthLoaded }
);
}
if (
cloudUrl && // MCC instance is known
config && // config loaded
config.keycloakLogin && // WITH SSO gates this effect
authAccess.usesSso && // auth access is for SSO auth
!ssoAuthLoading &&
!ssoAuthLoaded
) {
if (authAccess.isValid()) {
// skip authentication, go straight for the clusters
DEV_ENV &&
logger.log(
'hooks/useClusterLoader#ssoAuth',
'====== SSO skipped: authorized'
);
ssoAuthActions.setAuthorized();
} else {
DEV_ENV &&
logger.log(
'hooks/useClusterLoader#ssoAuth',
'====== starting authentication...'
);
ssoAuthActions.startAuthorization({ config });
}
} else if (DEV_ENV) {
logger.log(
'hooks/useClusterLoader#ssoAuth',
'====== not authenticating'
);
}
},
[
ssoAuthLoading,
ssoAuthLoaded,
ssoAuthError,
ssoAuthActions,
authAccess,
cloudUrl,
config,
activeEventType,
]
);
// 2. Get cluster data (i.e. the list of clusters available to the user)
useEffect(
function () {
if (DEV_ENV) {
logger.log(
'hooks/useClusterLoader#getClusterData',
`=== config=${
config ? '<set>' : '<none>'
}, authAccess.isValid()=${authAccess.isValid()}, clusterDataLoading=${clusterDataLoading}, clusterDataLoaded=${clusterDataLoaded}, clusterDataError=${
clusterDataError ? `"${clusterDataError}"` : '<none>'
}, onlyNamespaces=${onlyNamespaces?.join(',')}`,
{
cloudUrl,
config,
ssoAuthLoaded,
authAccess,
clusterDataLoading,
clusterDataLoaded,
}
);
}
if (
cloudUrl && // MCC instance is known
config && // config loaded
ssoAuthLoaded && // must be authenticated at this point
authAccess.isValid() && // must have valid tokens (they may have expired)
!clusterDataLoading &&
!clusterDataLoaded
) {
DEV_ENV &&
logger.log(
'hooks/useClusterLoader#getClusterData',
'====== fetching...'
);
clusterDataActions.load({
cloudUrl,
config,
authAccess,
onlyNamespaces,
});
} else if (DEV_ENV) {
logger.log('hooks/useClusterLoader#getClusterData', '====== no fetch');
}
},
[
cloudUrl,
authAccess,
config,
ssoAuthLoaded,
clusterDataLoading,
clusterDataLoaded,
clusterDataError,
clusterDataActions,
activeEventType,
onlyNamespaces,
]
);
};
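// EXAMPLE (illustrative sketch): a view drives the entire
// config -> SSO auth -> cluster list pipeline just by mounting this hook;
// the effects above re-run as each provider's state changes:
//
//   const GlobalPage = function () {
//     useClusterLoader(); // 'normal' state: consider clusters in all namespaces
//     return <ClusterList />; // props elided
//   };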
<|start_filename|>src/renderer/store/ProviderStore.js<|end_filename|>
import { Renderer } from '@k8slens/extensions';
import { noteOwner } from '../../strings';
const {
Component: { Notifications },
} = Renderer;
/**
* Defines common functionality for use when implementing a React context provider
* that provides async state updates.
*/
export class ProviderStore {
constructor() {
/**
* @property {function} setState Function to call to trigger updates to the provider.
*
* NOTE: Per https://reactjs.org/docs/hooks-reference.html#usestate,
* "React guarantees that setState function identity is stable and won’t change
* on re-renders." For this reason, it's safe to set once when the `setState`
* function is first generated, but there's no harm in setting it on every
* provider render.
*/
this.setState = null; // set this directly whenever the context is updated
this.store = this.makeNew(); // initialize
}
// convenience accessor for basic store property
/** @property {boolean} loading True while data is currently being loaded. */
get loading() {
return this.store.loading;
}
set loading(newValue) {
this.store.loading = !!newValue;
}
// convenience accessor for basic store property
/** @property {boolean} loaded True once a load has completed, regardless of error. */
get loaded() {
return this.store.loaded;
}
set loaded(newValue) {
this.store.loaded = !!newValue;
}
// convenience accessor for basic store property
/**
* @property {string|undefined} error The error encountered, if any.
* Empty string is considered NOT an error.
*/
get error() {
return this.store.error;
}
set error(newValue) {
this.store.error = newValue || undefined; // empty string is NOT an error
}
/**
* Generates a new, empty store object set to its initial state.
* @returns {Object}
*/
makeNew() {
return {
loading: false, // {boolean} true if currently loading data
loaded: false, // {boolean} true if load is complete (regardless of error)
error: undefined, // {string} if an error occurred; undefined otherwise
};
}
/**
* Creates a SHALLOW clone of `this.store`. Override if a deep clone is necessary.
* @returns {Object} Cloned store object.
*/
clone() {
// NOTE: cloning the store is what lets React detect changes to the root object;
// if a subclass stores nested objects, it should override this with a deep
// clone so that changes to those nested objects also trigger a render
return { ...this.store }; // assume only primitive props so no need to deep-clone.
}
/**
* Resets store state. Data will need to be reloaded.
* @param {boolean} [loading] True if resetting because data is loading; false if
* just resetting to initial state.
*/
reset(loading = false) {
Object.assign(this.store, this.makeNew()); // replace all properties with totally new ones
this.store.loading = loading;
this.onChange();
}
/**
* Forces an update to the provider's state.
*/
triggerContextUpdate() {
if (typeof this.setState !== 'function') {
throw new Error(
'[ProviderStore.triggerContextUpdate()] setState() is not configured: Unable to trigger a context update'
);
}
this.setState(this.clone());
}
/**
* Checks the store's `error` property and if it's not empty/falsy, posts
* an error notification to the UI with its content.
*/
notifyIfError() {
if (this.store.error) {
Notifications.error(`${this.store.error} ${noteOwner}`);
}
}
/**
* Called when a store property has changed to validate the current store data.
* Base implementation does nothing. Override should throw an error on failure.
*/
validate() {}
/**
* Called when a store property has changed.
*/
onChange() {
this.validate();
this.triggerContextUpdate();
}
}
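// EXAMPLE: a minimal subclass (mirroring ClusterActionsProviderStore in this
// codebase) only needs to extend makeNew() when it tracks state beyond
// loading/loaded/error:
//
//   class ThingProviderStore extends ProviderStore {
//     // @override
//     makeNew() {
//       return {
//         ...super.makeNew(),
//         things: [], // {Array<Object>} items loaded so far
//       };
//     }
//   }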
<|start_filename|>src/renderer/store/ClusterActionsProvider.js<|end_filename|>
//
// Provider for adding clusters to Lens
//
import { createContext, useContext, useState, useMemo } from 'react';
import * as rtv from 'rtvjs';
import { promises as fs } from 'fs';
import path from 'path';
import { Common, Renderer } from '@k8slens/extensions';
import { AuthClient } from '../auth/clients/AuthClient';
import { kubeConfigTemplate } from '../../util/templates';
import { AuthAccess } from '../auth/AuthAccess';
import { ProviderStore } from './ProviderStore';
import { Cluster } from './Cluster';
import { logger } from '../../util/logger';
import { clusterModelTs } from '../../typesets';
import { extractJwtPayload } from '../auth/authUtil';
import { IpcRenderer } from '../IpcRenderer';
import { getLensClusters } from '../rendererUtil';
import * as strings from '../../strings';
import * as consts from '../../constants';
const { Util } = Common;
const {
Component: { Notifications },
} = Renderer;
// OAuth2 'state' parameter value to use when requesting tokens for one cluster out of many
export const SSO_STATE_ADD_CLUSTERS = 'add-clusters';
//
// Store
//
class ClusterActionsProviderStore extends ProviderStore {
// @override
makeNew() {
return {
...super.makeNew(),
newWorkspaces: [], // {Array<Workspace>} list of new workspaces created, if any; shape: https://github.com/lensapp/lens/blob/00be4aa184089c1a6c7247bdbfd408665f325665/src/common/workspace-pr.store.ts#L27
kubeClusterAdded: false, // true if the cluster provided via a single kubeConfig was added; false if it was skipped because it was already in Lens
ssoAddClustersInProgress: false, // true (along with `loading`) if an 'add clusters' operation is in progress
};
}
}
const pr = new ClusterActionsProviderStore();
//
// Internal Methods
//
/**
* Posts an info-style notification that auto-dismisses after a few seconds unless
* the user interacts with it or mouses over it.
* @param {Component} Message Message to post.
* @param {Object} [options] Additional notification options. See
* https://docs.k8slens.dev/v4.2.4/extensions/api/interfaces/_renderer_api_components_.notification/
* for options (all except for `message`). The actual code is probably more helpful than
* those API docs...
* https://github.com/lensapp/lens/blob/70a8982c9f6396107f92aeced465620761d90726/src/renderer/components/notifications/notifications.tsx#L32
*/
const _postInfo = function (Message, options) {
Notifications.info(Message, {
// default is 0 (which means user must manually dismiss), and Notifications.ok()
// is similar to info() but dismissed in 2500ms which feels a bit short
timeout: 3500,
...options,
});
};
/**
* Determines if a cluster is already in Lens and returns its Lens Cluster object.
* @param {string|Cluster} cluster Cluster ID, or cluster object, to check.
* @returns {LensCluster|undefined} Lens Cluster if the cluster is already in Lens;
* `undefined` otherwise.
*/
const _getLensCluster = function (cluster) {
const existingLensClusters = getLensClusters();
const clusterId = cluster instanceof Cluster ? cluster.id : cluster;
return existingLensClusters.find(
(lensCluster) => lensCluster.metadata.uid === clusterId
);
};
/**
* Filters clusters into separate lists.
* @param {Array<Cluster>} clusters
* @returns {{ newClusters: Array<Cluster>, existingClusters: Array<Cluster>}}
* `newClusters` are those which do not exist in Lens; `existingClusters` are
* those that do.
*/
const _filterClusters = function (clusters) {
const existingClusters = []; // {Array<Cluster>} clusters already existing in Lens
// filter the clusters down to only clusters that aren't already in Lens
const newClusters = clusters.filter((cluster) => {
if (_getLensCluster(cluster)) {
existingClusters.push(cluster);
return false; // skip it
}
return true; // add it
});
return {
newClusters,
existingClusters,
};
};
/**
* [ASYNC] Gets access tokens for the specified cluster using SSO AUTH.
* @param {Object} options
* @param {Cluster} options.cluster The cluster to access.
* @param {Object} options.oAuth The OAuth response request parameters as JSON.
* `code` is the authorization code needed to obtain access tokens for the cluster.
* @param {Object} options.config MCC Config object.
* @param {boolean} [options.offline] If true, the refresh token generated for the
* clusters will be enabled for offline access. WARNING: This is less secure
* than a normal refresh token as it will never expire.
* @returns {Promise<Object>} On success, `{authAccess: AuthAccess}`, a new AuthAccess
* object that contains the token information; on error, `{error: string}`.
* @throws {Error} If `config` is not using SSO.
*/
const _getClusterAccess = async function ({
cluster,
oAuth,
config,
offline = false,
}) {
if (!config.keycloakLogin) {
throw new Error('_getClusterAccess() does not support basic auth');
}
const authClient = new AuthClient({ config });
let body;
let error;
if (oAuth.code) {
({ body, error } = await authClient.getToken({
authCode: oAuth.code,
clientId: cluster.idpClientId, // tokens unique to the cluster
offline,
}));
} else {
// no code, something went wrong
error = oAuth.error || oAuth.error_description || 'unknown';
}
let authAccess;
if (error) {
logger.error(
'ClusterActionsProvider._ssoGetClusterAccess()',
`Failed to get tokens from authorization code, error="${error}"`
);
error = strings.clusterActionsProvider.error.sso.authCode(cluster.id);
} else {
const jwt = extractJwtPayload(body.id_token);
if (jwt.preferred_username) {
authAccess = new AuthAccess({
username: jwt.preferred_username,
usesSso: true,
...body,
});
} else {
logger.error(
'ClusterActionsProvider._ssoGetClusterAccess()',
'Failed to get username from token JWT'
);
error = strings.clusterActionsProvider.error.sso.authCode(cluster.id);
}
}
return { authAccess, error };
};
/**
* [ASYNC] Writes a kubeConfig to the local disk for the given cluster.
* @param {string} options.cloudUrl MCC instance base URL that owns the cluster.
* @param {string} options.namespace MCC namespace the cluster comes from.
* @param {string} options.clusterName Name of the cluster in MCC.
* @param {string} options.clusterId ID of the cluster in MCC.
* @param {Object} options.kubeConfig Kubeconfig JSON object for the cluster.
* @param {string} options.savePath Absolute path where kubeConfigs are to be saved.
* @returns {Promise<Object>} On success, `{model: ClusterModel}`, a cluster model
* to use to add the cluster to Lens (see the `clusterModelTs` typeset for the interface);
* on error, `{error: string}`. All clusters are configured to be added to the active
* workspace by default.
*/
const _writeKubeConfig = async function ({
cloudUrl,
namespace,
clusterName,
clusterId,
kubeConfig,
savePath,
}) {
const errPrefix =
strings.clusterActionsProvider.error.kubeConfigSave(clusterId);
const kubeconfigPath = path.resolve(
savePath,
`${namespace}-${clusterName}-${clusterId}.json`
);
try {
// overwrite if already exists for some reason (failed to delete last time?)
await fs.writeFile(
kubeconfigPath,
JSON.stringify(kubeConfig, undefined, 2)
);
} catch (err) {
return { error: `${errPrefix}: ${err.message}` };
}
return {
model: {
// NOTE: This is a partial KubernetesCluster model to be used when adding
// a cluster to a Catalog source with `new Catalog.KubernetesCluster(model)`.
// See the following references for a complete notion of what properties are supported:
// @see node_modules/@k8slens/extensions/dist/src/common/catalog-entities/kubernetes-cluster.d.ts
// @see node_modules/@k8slens/extensions/dist/src/common/catalog/catalog-entity.d.ts
metadata: {
// native metadata
uid: clusterId,
name: clusterName,
// custom metadata
namespace,
cloudUrl,
},
spec: {
kubeconfigPath,
kubeconfigContext: kubeConfig.contexts[0].name, // must be same context name used in the kubeConfig file
},
status: {
phase: 'disconnected',
},
},
};
};
/**
* Posts a notification about new clusters added to Lens.
* @param {Array<{namespace: string, id: string, name: string}>} clusterShims List
* of objects with basic cluster info about which clusters were added.
*/
const _notifyNewClusters = function (clusterShims) {
_postInfo(
<p
dangerouslySetInnerHTML={{
__html: strings.clusterActionsProvider.notifications.newClustersHtml(
clusterShims.map((c) => `${c.namespace}/${c.name}`)
),
}}
/>
);
};
/**
* Switch to the specified cluster (and associated workspace).
* @param {string} clusterId ID of the cluster in Lens.
*/
const _switchToCluster = function (clusterId) {
// NOTE: we need a short delay to ensure the navigation actually takes place,
// since we have just navigated to this extension's global page, and we're
// about to navigate elsewhere; if we do it too quickly, we end-up with a
// race condition and this navigation doesn't take place (we remain on our
// global page instead of going to the cluster)
setTimeout(() => Renderer.Navigation.navigate(`/cluster/${clusterId}`), 500);
};
/**
* Switch to the Catalog view to show all the clusters.
*/
const _switchToCatalog = function () {
// TODO: eventually, once Lens supports filtering the Catalog view via navigation,
// like `/catalog#filter=...`, it would be great to switch to the Catalog
// view and somehow filter the list to only the clusters that were just added,
// or at least only MCC clusters with `mcc=true` label...
// NOTE: adding a little delay seems to just help navigation work better
setTimeout(() => Renderer.Navigation.navigate('/catalog'), 500);
};
/**
* Adds metadata to cluster models to prepare them to be added to the Lens Catalog.
* @param {Array<{namespace: string, id: string, name: string}>} clusterPartials
* Clusters being added. Subset of full `./Cluster.js` class properties.
* @param {Array<ClusterModel>} models List of models for clusters to add, one for each
* item in `clusters`. See `clusterModelTs` typeset for interface.
*/
const _addMetadata = function (clusterPartials, models) {
DEV_ENV &&
rtv.verify(
{
clusterPartials,
models,
},
{
clusterPartials: [
[{ namespace: rtv.STRING, name: rtv.STRING, id: rtv.STRING }],
],
models: [
[clusterModelTs],
(value, match, typeset, { parent }) =>
parent.clusterPartials.length === value.length,
],
}
);
clusterPartials.forEach((partial, idx) => {
const model = models[idx];
// NOTE: @see `CatalogEntityMetadata` in
// `node_modules/@k8slens/extensions/dist/src/common/catalog/catalog-entity.d.ts`
// for required/supported/optional metadata properties; note that arbitrary
// metadata properties are also supported
// Lens-specific optional metadata
model.metadata.source = consts.catalog.source;
model.metadata.labels = {
[consts.catalog.labels.source]: 'true', // labels must be strings only
[consts.catalog.labels.namespace]: partial.namespace,
};
});
};
/**
* [ASYNC] Sends the specified cluster models to the Lens Catalog to be added.
* @param {Array<ClusterModel>} models Models for the clusters to add to (send to)
* the Lens Catalog for display.
* @returns {Promise.<Object>} Does not fail. Resolves to an empty object on success;
* resolves to an object with `error: string` property on failure.
*/
const _sendClustersToCatalog = async function (models) {
try {
await IpcRenderer.getInstance().invoke(
consts.ipcEvents.invoke.ADD_CLUSTERS,
models
);
} catch (err) {
return { error: err.message };
}
return {};
};
/**
* [ASYNC] Add the specified cluster kubeConfigs to Lens.
* @param {Object} options
* @param {string} options.cloudUrl MCC URL. Must NOT end with a slash.
* @param {Array<Cluster>} options.newClusters New clusters to add that are not already in Lens.
* @param {Array<Promise<{cluster: Cluster, kubeConfig: Object}>>} options.promises Promises that
* are designed NOT to fail, and which will yield objects containing `cluster` and
* associated `kubeConfig` for each cluster in `newClusters`.
* @param {string} options.savePath Absolute path where kubeConfigs are to be saved.
* @returns {Promise.<Object>} Does not fail. Resolves to an empty object on success;
* resolves to an object with `error: string` property on failure.
*/
const _addClusterKubeConfigs = async function ({
cloudUrl,
newClusters,
promises,
savePath,
}) {
// these promises are designed NOT to reject
let results = await Promise.all(promises); // {Array<{cluster: Cluster, kubeConfig: Object}>} on success
let failure = results.find((res) => !!res.error); // look for any errors, use first-found
if (!failure) {
// write each kubeConfig to disk
promises = results.map(({ cluster, kubeConfig }) =>
_writeKubeConfig({
cloudUrl,
namespace: cluster.namespace,
clusterName: cluster.name,
clusterId: cluster.id,
kubeConfig,
savePath,
})
);
// these promises are designed NOT to reject
results = await Promise.all(promises); // {Array<{model: ClusterModel}>} on success
failure = results.find((res) => !!res.error); // look for any errors, use first-found
}
if (failure) {
return { error: failure.error };
} else {
const models = results.map(({ model }) => model);
_addMetadata(newClusters, models);
const sendResult = await _sendClustersToCatalog(models);
if (sendResult.error) {
logger.error(
'ClusterActionProvider._addClusterKubeConfigs()',
'Failed to add some clusters to Catalog, error="%s"',
sendResult.error
);
return { error: strings.clusterActionsProvider.error.catalogAddFailed() };
}
if (newClusters.length > 0) {
_notifyNewClusters(newClusters);
}
}
return {};
};
/**
* [ASYNC] Add the specified clusters to Lens.
* @param {Object} options
* @param {Array<Cluster>} options.clusters Clusters to add.
* @param {Object} options.config MCC Config object.
* @param {boolean} [options.offline] If true, the refresh token generated for the
* clusters will be enabled for offline access. WARNING: This is less secure
* than a normal refresh token as it will never expire.
*/
const _addClusters = async function ({ clusters, config, offline = false }) {
pr.reset(true);
const { newClusters, existingClusters } = _filterClusters(clusters);
if (newClusters.length > 0) {
pr.store.ssoAddClustersInProgress = true;
const authClient = new AuthClient({ config });
const url = authClient.getSsoAuthUrl({
offline,
clientId: newClusters[0].idpClientId, // tokens unique to the cluster
state: SSO_STATE_ADD_CLUSTERS,
});
// NOTE: at this point, the event loop slice ends and we wait for the user to
// respond in the browser
Util.openExternal(url); // open in default browser
} else if (existingClusters.length > 0) {
_postInfo(
<p
dangerouslySetInnerHTML={{
__html: strings.clusterActionsProvider.notifications.skippedClusters(
existingClusters.map(
(cluster) => `${cluster.namespace}/${cluster.name}`
)
),
}}
/>
);
pr.loading = false;
pr.loaded = true;
pr.notifyIfError();
pr.onChange();
}
};
/**
* [ASYNC] Finish the SSO process to add the specified cluster (ONE) to Lens.
* @param {Object} options
* @param {Object} options.oAuth The OAuth response request parameters as JSON.
* `code` is the authorization code needed to obtain access tokens for the cluster.
* @param {Array<Cluster>} options.clusters Clusters to add. Must only be one, and it's
* assumed NOT to already be in Lens.
* @param {string} options.savePath Absolute path where kubeConfigs are to be saved.
* @param {string} options.cloudUrl MCC URL. Must NOT end with a slash.
* @param {Object} options.config MCC Config object.
* @param {boolean} [options.offline] If true, the refresh token generated for the
* clusters will be enabled for offline access. WARNING: This is less secure
* than a normal refresh token as it will never expire.
* @throws {Error} If the store is not aware of a pending 'add clusters' operation.
*/
const _ssoFinishAddClusters = async function ({
oAuth,
clusters,
savePath,
cloudUrl,
config,
offline = false,
}) {
if (!pr.store.ssoAddClustersInProgress) {
throw new Error('A pending "add clusters" operation must be in progress');
}
if (clusters.length !== 1) {
throw new Error('Expecting exactly one cluster to add');
}
const cluster = clusters[0];
const { error: accessError, authAccess } = await _getClusterAccess({
cluster,
oAuth,
config,
offline,
});
if (accessError) {
pr.error = accessError;
} else {
const addResult = await _addClusterKubeConfigs({
cloudUrl,
newClusters: [cluster],
promises: [
Promise.resolve({
cluster,
kubeConfig: kubeConfigTemplate({
username: authAccess.username,
token: authAccess.token,
refreshToken: authAccess.refreshToken,
cluster,
}),
}),
],
savePath,
});
if (addResult.error) {
pr.error = addResult.error;
}
}
pr.store.ssoAddClustersInProgress = false;
pr.loading = false;
pr.loaded = true;
pr.notifyIfError();
pr.onChange();
if (!pr.error) {
_switchToCatalog();
}
};
/**
* [ASYNC] Add the specified cluster via kubeConfig to Lens.
* @param {Object} options
* @param {string} options.savePath Absolute path where kubeConfigs are to be saved.
* @param {string} options.cloudUrl MCC URL. Must NOT end with a slash.
* @param {Object} options.kubeConfig KubeConfig object for the cluster to add.
* @param {string} options.namespace MCC namespace to which the cluster belongs.
* @param {string} options.clusterName Name of the cluster.
* @param {string} options.clusterId ID of the cluster.
*/
const _addKubeCluster = async function ({
savePath,
cloudUrl,
kubeConfig,
namespace,
clusterName,
clusterId,
}) {
pr.reset(true);
if (_getLensCluster(clusterId)) {
_postInfo(
<p
dangerouslySetInnerHTML={{
__html: strings.clusterActionsProvider.notifications.skippedClusters([
`${namespace}/${clusterName}`,
]),
}}
/>
);
} else {
const { model, error } = await _writeKubeConfig({
cloudUrl,
namespace,
clusterName,
clusterId,
kubeConfig,
savePath,
});
if (error) {
pr.error = error;
} else {
const partial = { namespace, id: clusterId, name: clusterName };
_addMetadata([partial], [model]);
const sendResult = await _sendClustersToCatalog([model]);
if (sendResult.error) {
logger.error(
'ClusterActionProvider._addKubeCluster()',
'Failed to add cluster to Catalog, error="%s"',
sendResult.error
);
pr.error = strings.clusterActionsProvider.error.catalogAddFailed();
} else {
pr.store.kubeClusterAdded = true;
_notifyNewClusters([{ namespace, id: clusterId, name: clusterName }]);
_switchToCluster(clusterId);
}
}
}
pr.loading = false;
pr.loaded = true;
pr.notifyIfError();
pr.onChange();
};
/**
* [ASYNC] Activate the specified cluster in Lens if it already exists.
* @param {Object} options
* @param {string} options.namespace MCC namespace to which the cluster belongs.
* @param {string} options.clusterName Name of the cluster.
* @param {string} options.clusterId ID of the cluster.
*/
const _activateCluster = function ({ namespace, clusterName, clusterId }) {
pr.reset(true);
const lensCluster = _getLensCluster(clusterId);
if (lensCluster) {
_switchToCluster(clusterId);
} else {
pr.error = strings.clusterActionsProvider.error.clusterNotFound(
`${namespace}/${clusterName}`
);
}
pr.loading = false;
pr.loaded = true;
pr.notifyIfError();
pr.onChange();
};
//
// Provider Definition
//
const ClusterActionsContext = createContext();
export const useClusterActions = function () {
const context = useContext(ClusterActionsContext);
if (!context) {
throw new Error(
'useClusterActions must be used within a ClusterActionsProvider'
);
}
// NOTE: `context` is the value of the `value` prop we set on the
// <ClusterActionsContext.Provider value={...}/> we return as the <ClusterActionsProvider/>
// component to wrap all children that should have access to the state (i.e.
// all the children that will be able to `useClusterActions()` to access the state)
const [state] = context;
// this is what you actually get from `useClusterActions()` when you consume it
return {
state,
//// ACTIONS
actions: {
/**
* [ASYNC] Add the specified clusters to Lens.
*
* This method will __start__ the SSO process to add the cluster (must only be one).
* `ssoFinishAddClusters()` must be called once the OAuth authorization code has been
* obtained in order to finish adding it.
*
* @param {Object} options
* @param {Array<Cluster>} options.clusters Clusters to add.
* @param {Object} options.config MCC Config object.
* @param {boolean} [options.offline] If true, the refresh token generated for the
* clusters will be enabled for offline access. WARNING: This is less secure
* than a normal refresh token as it will never expire.
* @throws {Error} If using SSO and `options.clusters` contains more than 1 cluster.
*/
addClusters(options) {
if (!pr.loading && options.clusters.length > 0) {
if (options.clusters.length > 1) {
throw new Error(
'Cannot add more than one cluster at a time under SSO'
);
}
_addClusters(options);
}
},
/**
* [ASYNC] Finish the SSO process to add the specified cluster (ONE) to Lens.
* @param {Object} options
* @param {Object} options.oAuth The OAuth response request parameters as JSON.
* `code` is the authorization code needed to obtain access tokens for the cluster.
* @param {Array<Cluster>} options.clusters Clusters to add. Must only be one, and it's
* assumed NOT to already be in Lens.
* @param {string} options.savePath Absolute path where kubeConfigs are to be saved.
* @param {string} options.cloudUrl MCC URL. Must NOT end with a slash.
* @param {Object} options.config MCC Config object.
* @param {boolean} [options.offline] If true, the refresh token generated for the
* clusters will be enabled for offline access. WARNING: This is less secure
* than a normal refresh token as it will never expire.
* @throws {Error} If `options.config` is not using SSO.
* @throws {Error} If `options.clusters` contains more than 1 cluster.
*/
ssoFinishAddClusters(options) {
if (pr.store.ssoAddClustersInProgress) {
if (!options.config.keycloakLogin) {
throw new Error('Config is not using SSO');
}
if (options.clusters.length !== 1) {
throw new Error(
'Exactly one cluster must be specified for adding to Lens'
);
}
_ssoFinishAddClusters(options);
}
},
/**
* Cancels an outstanding SSO-based request to add clusters, putting the provider
* into an error state.
* @param {Object} options
* @param {string} [options.reason] Reason for cancelation (becomes error message).
* Defaults to "user canceled" message.
* @param {boolean} [options.notify] If true, error notification will be displayed;
* otherwise, error is silent.
*/
ssoCancelAddClusters({
reason = strings.clusterActionsProvider.error.sso.addClustersUserCanceled(),
notify = false,
} = {}) {
if (pr.store.ssoAddClustersInProgress) {
pr.store.ssoAddClustersInProgress = false;
pr.loading = false;
pr.loaded = true;
pr.error = reason;
if (notify) {
pr.notifyIfError();
}
pr.onChange();
}
},
/**
* [ASYNC] Add the specified cluster via kubeConfig to Lens.
* @param {Object} options
* @param {string} options.savePath Absolute path where kubeConfigs are to be saved.
* @param {string} options.cloudUrl MCC URL. Must NOT end with a slash.
* @param {Object} options.kubeConfig Kubeconfig object for the cluster to add.
* @param {string} options.namespace MCC namespace to which the cluster belongs.
* @param {string} options.clusterName Name of the cluster.
* @param {string} options.clusterId ID of the cluster.
*/
addKubeCluster(options) {
if (!pr.loading) {
_addKubeCluster(options);
}
},
/**
* [ASYNC] Activate the specified cluster in Lens if it already exists.
* @param {Object} options
* @param {string} options.namespace MCC namespace to which the cluster belongs.
* @param {string} options.clusterName Name of the cluster.
* @param {string} options.clusterId ID of the cluster.
*/
activateCluster(options) {
if (!pr.loading) {
_activateCluster(options);
}
},
/** Resets store state. Data will need to be reloaded. */
reset() {
if (!pr.loading) {
pr.reset();
}
},
},
};
};
export const ClusterActionsProvider = function ({ ...props }) {
// NOTE: since the state is passed directly (by reference) into the context
// returned by the provider, even the initial state should be a clone of the
// `store` so that we consistently return a `state` property (in the context)
// that is a shallow clone of the `store`
const [state, setState] = useState(pr.clone());
const value = useMemo(() => [state, setState], [state]);
pr.setState = setState;
return <ClusterActionsContext.Provider value={value} {...props} />;
};
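// EXAMPLE (illustrative sketch): a component rendered inside
// <ClusterActionsProvider> consumes the hook and starts the SSO add flow
// (exactly one cluster at a time under SSO):
//
//   const { state, actions } = useClusterActions();
//   if (!state.loading) {
//     actions.addClusters({ clusters: [cluster], config, offline: false });
//   }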
<|start_filename|>src/util/deepFreeze.js<|end_filename|>
/**
* Deep-freezes `obj`: freezes the object itself and, recursively, every enumerable
* object value it contains (including objects nested within arrays). An _object_ is
* anything that is `typeof` "object", except for arrays. Therefore, a `RegExp`,
* for example, would get frozen, but a `function` would not.
*
* This function is meant to be used with plain objects and arrays.
*
* NOTE: `obj` is frozen in-place. A copy is not created.
*
* @param {Object} obj The object to freeze, and whose enumerable object keys will
* be recursively frozen.
* @returns {Object} The now-frozen `obj`. If `obj` is not an array or object,
* it is simply returned verbatim.
*/
export const deepFreeze = function (obj) {
// only objects (possibly nested in arrays) need freezing
// NOTE: typeof [] === 'object' so we cover the array case here too
if (obj && typeof obj === 'object') {
let values;
if (Array.isArray(obj)) {
values = obj; // don't freeze the array itself, but do recurse into its items
} else {
Object.freeze(obj); // shallow freeze
values = Object.values(obj);
}
values.forEach((v) => deepFreeze(v)); // recursive
}
return obj;
};
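// EXAMPLE: per the semantics above, objects (even inside arrays) get frozen,
// while arrays themselves do not:
//
//   const config = deepFreeze({ list: [{ a: 1 }], re: /x/ });
//   Object.isFrozen(config); // true
//   Object.isFrozen(config.re); // true (a RegExp is an object)
//   Object.isFrozen(config.list); // false (arrays are not frozen)
//   Object.isFrozen(config.list[0]); // true (objects within arrays are)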
<|start_filename|>test/mocks/mockNamespaces.js<|end_filename|>
export const mockNamespaces = [
{
id: '0',
name: 'foo',
deleteInProgress: false,
phase: 'phase',
},
{
id: '1',
name: 'bar',
deleteInProgress: false,
phase: 'phase',
},
{
id: '2',
name: 'baz',
deleteInProgress: false,
phase: 'phase',
},
];
<|start_filename|>src/typesets.js<|end_filename|>
//
// RTV Typesets for runtime validations
//
import * as rtv from 'rtvjs';
/**
* Describes an object used to create a new instance of a `Common.Catalog.KubernetesCluster`
* object that gets added to the Lens Catalog.
*/
export const clusterModelTs = {
metadata: {
//
// NATIVE PROPERTIES
//
uid: rtv.STRING,
name: rtv.STRING,
source: [rtv.OPTIONAL, rtv.STRING],
labels: [
rtv.OPTIONAL,
rtv.HASH_MAP,
{
$values: rtv.STRING,
},
],
//
// CUSTOM PROPERTIES
//
namespace: rtv.STRING,
cloudUrl: rtv.STRING, // URL of the MCC instance to which this cluster belongs
},
spec: {
kubeconfigPath: rtv.STRING, // absolute path
kubeconfigContext: rtv.STRING,
},
status: {
phase: [
rtv.STRING,
{ oneOf: ['connecting', 'connected', 'disconnecting', 'disconnected'] },
],
},
};
/**
* Describes a `Common.Catalog.KubernetesCluster` object that we get from iterating
* "entities" of this type in the Catalog, as well as from getting the active entity
* via `Renderer.Catalog.catalogEntities.activeEntity` (which is a mobx observable).
*/
export const clusterEntityTs = { ...clusterModelTs };
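// EXAMPLE (illustrative sketch): validating a candidate model before sending
// it to the Catalog (all values below are made-up):
//
//   rtv.verify(
//     {
//       metadata: {
//         uid: 'uid-123',
//         name: 'demo',
//         namespace: 'my-project',
//         cloudUrl: 'https://container-cloud.example.com',
//       },
//       spec: {
//         kubeconfigPath: '/abs/path/my-project-demo-uid-123.json',
//         kubeconfigContext: 'demo',
//       },
//       status: { phase: 'disconnected' },
//     },
//     clusterModelTs
//   );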
<|start_filename|>src/renderer/store/SsoAuthProvider.js<|end_filename|>
//
// SSO Authentication Provider (Keycloak)
//
import { createContext, useContext, useState, useMemo } from 'react';
import { Common } from '@k8slens/extensions';
import { ProviderStore } from './ProviderStore';
import * as strings from '../../strings';
import { logger } from '../../util/logger';
import { extractJwtPayload } from '../auth/authUtil';
import { AuthClient } from '../auth/clients/AuthClient';
const { Util } = Common;
//
// Store
//
class SsoAuthProviderStore extends ProviderStore {
// basic store is all that is needed for now
}
const pr = new SsoAuthProviderStore();
//
// Internal Methods
//
/**
* [ASYNC] Start authorization with MCC to get the temp access code via the
* redirect URI that will use the 'lens://' protocol to redirect the user
* to Lens and ultimately call back into `_finishAuthorization()`.
* @param {Object} options
* @param {Object} options.config MCC Config object.
*/
const _startAuthorization = async function ({ config }) {
pr.reset(true);
const authClient = new AuthClient({ config });
if (config.keycloakLogin) {
const url = authClient.getSsoAuthUrl();
Util.openExternal(url); // open in default browser
} else {
pr.loading = false;
pr.loaded = true;
pr.error = strings.ssoAuthProvider.error.basicOnly();
}
pr.notifyIfError();
pr.onChange();
};
/**
* [ASYNC] Completes the authorization process by exchanging the temp access code
* for access tokens.
*
* NOTE: This method ASSUMES `loading` is `true` (i.e. load is in progress).
*
* @param {Object} options
* @param {Object} options.oAuth OAuth response data from request for auth code.
* See `extEventOauthCodeTs` typeset in `eventBus.ts` for expected shape.
* @param {Object} options.config MCC Config object.
* @param {AuthAccess} options.authAccess Current authentication information.
* This instance WILL be cleared and updated with new tokens.
*/
const _finishAuthorization = async function ({ oAuth, config, authAccess }) {
if (!pr.loading) {
// ignore rogue request to complete auth if it was canceled in Lens, but then
// user completed request in browser for some reason
return;
}
const authClient = new AuthClient({ config });
let body;
let error;
if (oAuth.code) {
({ body, error } = await authClient.getToken({ authCode: oAuth.code }));
} else {
// no code, something went wrong
error = oAuth.error || oAuth.error_description || 'unknown';
}
pr.loading = false;
pr.loaded = true;
if (error) {
logger.error(
'SsoAuthProvider._finishAuthorization()',
`Failed to get tokens from authorization code, error="${error}"`
);
pr.error = strings.ssoAuthProvider.error.authCode();
} else {
const jwt = extractJwtPayload(body.id_token);
if (jwt.preferred_username) {
authAccess.updateTokens(body);
authAccess.username = jwt.preferred_username;
authAccess.usesSso = true;
} else {
logger.error(
'SsoAuthProvider._finishAuthorization()',
'Failed to get username from token JWT'
);
pr.error = strings.ssoAuthProvider.error.authCode();
}
}
pr.notifyIfError();
pr.onChange();
};
//
// Provider Definition
//
const SsoAuthContext = createContext();
export const useSsoAuth = function () {
const context = useContext(SsoAuthContext);
if (!context) {
throw new Error('useSsoAuth must be used within an SsoAuthProvider');
}
// NOTE: `context` is the value of the `value` prop we set on the
// <SsoAuthContext.Provider value={...}/> we return as the <SsoAuthProvider/>
// component to wrap all children that should have access to the state (i.e.
// all the children that will be able to `useSsoAuth()` to access the state)
const [state] = context;
// this is what you actually get from `useSsoAuth()` when you consume it
return {
state,
//// ACTIONS
actions: {
/**
* Begins the authorization process for the Lens app with the MCC server via
* user SSO authentication.
* @param {Object} options
* @param {Object} options.config MCC Config object.
*/
startAuthorization(options) {
if (!pr.loading) {
// must not be loading (we can only handle one load at a time)
_startAuthorization(options);
}
},
/**
* Finishes the authorization process given the temporary access code, by
* exchanging it for access tokens.
* @param {Object} options
* @param {Object} options.oAuth OAuth response data containing the temporary access code.
* @param {Object} options.config MCC Config object.
* @param {AuthAccess} options.authAccess Current authentication information.
* This instance WILL be cleared and updated with new tokens.
*/
finishAuthorization(options) {
if (pr.loading) {
// must be loading (i.e. in the middle of the process)
_finishAuthorization(options);
}
},
/**
* Imperatively update the loaded state to `true`. Use this if you already
* have a valid AuthAccess instance and don't need to authenticate.
*/
setAuthorized() {
if (!pr.loading) {
pr.loaded = true;
pr.error = undefined;
pr.onChange();
}
},
/**
* Cancels an outstanding request, putting the provider into an error state.
* @param {Object} options
* @param {string} [options.reason] Reason for cancelation (becomes error message).
* Defaults to "user canceled" message.
* @param {boolean} [options.notify] If true, error notification will be displayed;
* otherwise, error is silent.
*/
cancel({
reason = strings.ssoAuthProvider.error.userCanceled(),
notify = false,
} = {}) {
if (pr.loading) {
pr.loading = false;
pr.loaded = true;
pr.error = reason;
if (notify) {
pr.notifyIfError();
}
pr.onChange();
}
},
/** Resets store state. Data will need to be reloaded. */
reset() {
if (!pr.loading) {
pr.reset();
}
},
},
};
};
export const SsoAuthProvider = function (props) {
// NOTE: since the state is passed directly (by reference) into the context
// returned by the provider, even the initial state should be a clone of the
// `store` so that we consistently return a `state` property (in the context)
// that is a shallow clone of the `store`
const [state, setState] = useState(pr.clone());
const value = useMemo(() => [state, setState], [state]);
pr.setState = setState;
return <SsoAuthContext.Provider value={value} {...props} />;
};
<|start_filename|>src/constants.js<|end_filename|>
import pkg from '../package.json';
import { deepFreeze } from './util/deepFreeze';
/** Lens Catalog-related constants. */
export const catalog = deepFreeze({
/**
* Catalog source name assigned to all clusters added to the Lens Catalog by this
* extension (this extension's package name).
*/
source: pkg.name,
/** Label names. */
labels: {
/** Label identifying the cluster as coming from an MCC instance. */
source: 'mcc',
/** Label identifying the cluster's namespace. It's called a "project" in MCC UI. */
namespace: 'project',
},
/** Lens entities (built-in). */
entities: {
/** `Common.Catalog.KubernetesCluster` */
kubeCluster: {
/** Entity type (there may be multiple versions in the future; see `versions`). */
kind: 'KubernetesCluster',
/** Generic group (could be clusters of any version). */
group: 'entity.k8slens.dev',
/** API versions of KubernetesCluster object kinds. */
versions: {
v1alpha1: 'entity.k8slens.dev/v1alpha1', // Common.Catalog.KubernetesCluster class
},
},
},
});
/** IPC events */
export const ipcEvents = deepFreeze({
/** Send to both `main` and `renderer` processes. No response. No awaiting. */
broadcast: {
/** Signature: (event: string, level: string, context: string, message: string, ...args: Array) => void */
LOGGER: 'logger',
/** Signature: (event: string, clusterIds: Array<string>) => void */
CLUSTERS_ADDED: 'clustersAdded',
/** Signature: (event: string, clusterIds: Array<string>) => void */
CLUSTERS_REMOVED: 'clustersRemoved',
},
/** Invoked on the `main` process only. Returns a promise to be awaited. */
invoke: {
/** Signature: (event: string, models: Array<ClusterModel>) => void */
ADD_CLUSTERS: 'addClusters',
/** Signature: (event: string, clusterId: string) => void */
REMOVE_CLUSTER: 'removeCluster',
/** Signature: (event: string, clusterId: string) => void */
DELETE_CLUSTER: 'deleteCluster',
},
});
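// EXAMPLE (illustrative sketch): from the Renderer process, `invoke` events
// are awaited while `broadcast` events are subscribed to (see IpcRenderer):
//
//   await IpcRenderer.getInstance().invoke(
//     ipcEvents.invoke.REMOVE_CLUSTER,
//     clusterId
//   );
//   const dispose = IpcRenderer.getInstance().listen(
//     ipcEvents.broadcast.CLUSTERS_REMOVED,
//     (event, clusterIds) => {
//       // react to removal...
//     }
//   );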
<|start_filename|>src/renderer/components/GlobalPage/ClusterList.js<|end_filename|>
import { useState, useEffect } from 'react';
import propTypes from 'prop-types';
import styled from '@emotion/styled';
import { Renderer } from '@k8slens/extensions';
import { useClusterLoadingState } from '../../hooks/useClusterLoadingState';
import { Cluster } from '../../store/Cluster';
import { Section } from '../Section';
import { InlineNotice, types as noticeTypes, iconSizes } from '../InlineNotice';
import { layout, mixinFlexColumnGaps } from '../styles';
import { getLensClusters } from '../../rendererUtil';
import { IpcRenderer } from '../../IpcRenderer';
import * as strings from '../../../strings';
import { ipcEvents } from '../../../constants';
const { Component } = Renderer;
const CheckList = styled.div(function () {
return {
...mixinFlexColumnGaps(layout.pad),
backgroundColor: 'var(--mainBackground)',
padding: layout.pad,
overflow: 'auto',
borderRadius: layout.grid,
borderStyle: 'solid',
borderWidth: 1,
borderColor: 'var(--borderColor)',
boxShadow: '0 0 4px 0 inset var(--boxShadow)',
};
});
export const ClusterList = function ({
clusters,
onlyNamespaces,
selectedClusters,
singleSelectOnly,
onSelection,
onSelectAll,
}) {
//
// STATE
//
const loading = useClusterLoadingState();
// only ready clusters can actually be selected
const selectableClusters = clusters.filter((cl) => cl.ready);
const [lensClusters, setLensClusters] = useState(getLensClusters());
//
// EVENTS
//
const handleClusterSelect = function (selected, cluster) {
if (typeof onSelection === 'function') {
onSelection({ cluster, selected });
}
};
const handleSelectAllNone = function () {
if (typeof onSelectAll === 'function') {
onSelectAll({
selected: selectedClusters.length < selectableClusters.length,
});
}
};
const handleIpcClustersChanged = function () {
setLensClusters(getLensClusters());
};
//
// EFFECTS
//
useEffect(function () {
const disposeClusterAdded = IpcRenderer.getInstance().listen(
ipcEvents.broadcast.CLUSTERS_ADDED,
handleIpcClustersChanged
);
const disposeClusterRemoved = IpcRenderer.getInstance().listen(
ipcEvents.broadcast.CLUSTERS_REMOVED,
handleIpcClustersChanged
);
return function () {
disposeClusterAdded();
disposeClusterRemoved();
};
}, []);
//
// RENDER
//
const isClusterSelected = function (cluster) {
return !!selectedClusters.find((c) => c.id === cluster.id);
};
// first by namespace, then by name
const compareClusters = function (left, right) {
const nsCompare = left.namespace.localeCompare(right.namespace);
if (nsCompare !== 0) {
return nsCompare;
}
return left.name.localeCompare(right.name);
};
return (
<Section className="lecc-ClusterList">
<h3>{strings.clusterList.title()}</h3>
{onlyNamespaces && (
<p>{strings.clusterList.onlyNamespaces(onlyNamespaces)}</p>
)}
{singleSelectOnly && (
<InlineNotice type={noticeTypes.WARNING} iconSize={iconSizes.SMALL}>
<small
dangerouslySetInnerHTML={{
__html: strings.clusterList.ssoLimitationHtml(),
}}
/>
</InlineNotice>
)}
<CheckList>
{[...clusters].sort(compareClusters).map( // copy first: sort() mutates the array
(
cluster // list ALL clusters
) => {
const inLens = lensClusters.find(
(lc) => lc.metadata.uid === cluster.id
);
return (
<Component.Checkbox
key={cluster.id}
label={`${cluster.namespace} / ${cluster.name}${
cluster.ready
? inLens
? ` ${strings.clusterList.alreadyInLens()}`
: ''
: ` ${strings.clusterList.notReady()}`
}`}
disabled={!cluster.ready || inLens || loading}
value={isClusterSelected(cluster)}
onChange={(checked) => handleClusterSelect(checked, cluster)}
/>
);
}
)}
</CheckList>
{!singleSelectOnly && (
<div>
<Component.Button
primary
disabled={loading || selectableClusters.length <= 0}
label={
selectedClusters.length < selectableClusters.length
? strings.clusterList.action.selectAll.label()
: strings.clusterList.action.selectNone.label()
}
onClick={handleSelectAllNone}
/>
</div>
)}
</Section>
);
};
ClusterList.propTypes = {
clusters: propTypes.arrayOf(propTypes.instanceOf(Cluster)), // ALL clusters, even non-ready ones
onlyNamespaces: propTypes.arrayOf(propTypes.string), // optional list of namespace IDs to which the list is restricted
selectedClusters: propTypes.arrayOf(propTypes.instanceOf(Cluster)),
singleSelectOnly: propTypes.bool, // true if only one cluster may be selected; false if any number can be selected
onSelection: propTypes.func, // ({ cluster: Cluster, selected: boolean }) => void
onSelectAll: propTypes.func, // ({ selected: boolean }) => void
};
ClusterList.defaultProps = {
singleSelectOnly: false,
clusters: [],
selectedClusters: [],
};
<|start_filename|>src/renderer/components/GlobalPage/Login.js<|end_filename|>
import { useState, useEffect, useCallback } from 'react';
import styled from '@emotion/styled';
import { Renderer } from '@k8slens/extensions';
import { layout } from '../styles';
import { Section } from '../Section';
import { useExtState } from '../../store/ExtStateProvider';
import { useConfig } from '../../store/ConfigProvider';
import { useSsoAuth } from '../../store/SsoAuthProvider';
import { useClusterData } from '../../store/ClusterDataProvider';
import { useClusterLoadingState } from '../../hooks/useClusterLoadingState';
import { InlineNotice } from '../InlineNotice';
import { normalizeUrl } from '../../../util/netUtil';
import * as strings from '../../../strings';
const {
Component: { Notifications, Input, Button },
} = Renderer;
const urlClassName = 'lecc-Login--url';
const Field = styled.div(function () {
return {
display: 'flex',
alignItems: 'center',
marginBottom: layout.gap,
':last-child': {
marginBottom: 0,
},
[`div.Input.${urlClassName}`]: {
flex: 1,
},
'> label': {
minWidth: layout.grid * 23,
marginRight: `${layout.pad}px`,
},
};
});
export const Login = function () {
//
// STATE
//
const {
state: {
authAccess,
prefs: { cloudUrl },
},
actions: extActions,
} = useExtState();
const {
state: {
loading: configLoading,
loaded: configLoaded,
error: configError,
config,
},
actions: configActions,
} = useConfig();
const {
state: { loading: ssoAuthLoading },
actions: ssoAuthActions,
} = useSsoAuth();
const {
state: {
loading: clusterDataLoading,
loaded: clusterDataLoaded,
error: clusterDataError,
},
actions: clusterDataActions,
} = useClusterData();
// NOTE: while this does include the individual flags above, it may include
// others we don't need to know details about here, but still need to be
// responsive to
const loading = useClusterLoadingState();
const [url, setUrl] = useState(cloudUrl || '');
const [refreshing, setRefreshing] = useState(false); // if we're just reloading clusters
// {boolean} true if user has clicked the Access button; false otherwise
const [connectClicked, setConnectClicked] = useState(false);
const usesSso = !!config?.keycloakLogin;
//
// EVENTS
//
const startLogin = useCallback(
function () {
authAccess.resetCredentials();
authAccess.resetTokens();
authAccess.usesSso = usesSso;
// capture changes to auth details so far, and trigger SSO login in
// useClusterLoader() effect (because this will result in an updated authAccess
// object that has the right configuration per updates above)
extActions.setAuthAccess(authAccess);
},
[authAccess, extActions, usesSso]
);
// returns true if refresh is possible and has started; false otherwise
const tryRefresh = function () {
if (
clusterDataLoaded &&
!clusterDataError &&
url === cloudUrl &&
authAccess.isValid() &&
authAccess.usesSso
) {
// just do a cluster data refresh instead of going through auth again
setRefreshing(true);
clusterDataActions.load({ cloudUrl, config, authAccess });
return true;
}
return false;
};
const handleUrlChange = function (value) {
setUrl(value);
};
const handleConnectClick = function () {
if (!tryRefresh()) {
const normUrl = normalizeUrl(url);
setUrl(normUrl); // update to actual URL we'll use
setConnectClicked(true);
ssoAuthActions.reset();
clusterDataActions.reset();
// we're accessing a different instance, so nothing we may have already will
// work there
authAccess.resetCredentials();
authAccess.resetTokens();
extActions.setAuthAccess(authAccess);
// save URL as `cloudUrl` in preferences since the user claims it's valid
extActions.setCloudUrl(normUrl);
// NOTE: if the config loads successfully and we see that the instance is
// set for SSO auth, our effect() below that checks for `configLoaded`
// will auto-trigger onLogin(), which will then trigger SSO auth
configActions.load(normUrl); // implicit reset of current config, if any
}
};
const handleSsoCancelClick = function () {
ssoAuthActions.cancel();
setConnectClicked(false);
};
//
// EFFECTS
//
useEffect(
function () {
if (refreshing && !clusterDataLoading && clusterDataLoaded) {
setRefreshing(false);
}
},
[refreshing, clusterDataLoading, clusterDataLoaded]
);
// on load, if we already have an instance URL but haven't yet loaded the config,
// load it immediately so we can know right away if it supports SSO or not, and
// save some time when the user clicks Connect
useEffect(
function () {
if (cloudUrl && !configLoading && !configLoaded) {
configActions.load(cloudUrl);
}
},
[cloudUrl, configLoading, configLoaded, configActions]
);
useEffect(
function () {
if (configLoaded && !configError && connectClicked) {
setConnectClicked(false);
// start the SSO login process if the instance uses SSO since the user has
// clicked on the Connect button indicating intent to take action
if (usesSso) {
startLogin();
} else {
Notifications.error(
`${strings.login.error.basicAuth()} ${strings.noteOwner}`
);
}
}
},
[
configLoaded,
configError,
config,
url,
extActions,
startLogin,
connectClicked,
usesSso,
]
);
//
// RENDER
//
return (
<Section className="lecc-Login">
<h3>{strings.login.title()}</h3>
<Field>
<label htmlFor="lecc-login-url">{strings.login.url.label()}</label>
<Input
type="text"
className={urlClassName}
theme="round-black" // borders on all sides, rounded corners
id="lecc-login-url"
disabled={loading}
value={url}
onChange={handleUrlChange}
/>
</Field>
<div>
<Button
primary
disabled={loading}
label={
refreshing ||
(url === cloudUrl && clusterDataLoaded && !clusterDataError)
? strings.login.action.refresh()
: strings.login.action.connect()
}
waiting={configLoading || ssoAuthLoading || refreshing}
onClick={handleConnectClick}
/>
</div>
{ssoAuthLoading && (
<>
<InlineNotice>
<p
dangerouslySetInnerHTML={{
__html: strings.login.sso.messageHtml(),
}}
/>
</InlineNotice>
<div>
<Button
primary
label={strings.login.action.ssoCancel()}
onClick={handleSsoCancelClick}
/>
</div>
</>
)}
</Section>
);
};
<|start_filename|>src/renderer/rendererUtil.js<|end_filename|>
//
// Renderer-specific utilities
//
import { Renderer } from '@k8slens/extensions';
import * as consts from '../constants';
const { Catalog } = Renderer;
/**
* @returns {Array<Common.Catalog.KubernetesCluster>} List of clusters in the
* Lens Catalog.
*/
export const getLensClusters = function () {
return Catalog.catalogEntities.getItemsForApiKind(
consts.catalog.entities.kubeCluster.versions.v1alpha1,
consts.catalog.entities.kubeCluster.kind
);
};
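// Illustrative usage (hypothetical, not part of this module): checking whether
// a cluster from an MCC instance is already present in the Catalog by its UID.
//
//   const inLens = getLensClusters().some((entity) => entity.metadata.uid === uid);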
<|start_filename|>src/main/IpcMain.js<|end_filename|>
//
// Main Process IPC API
//
import { promises as fs } from 'fs';
import { observable } from 'mobx';
import { Main, Common } from '@k8slens/extensions';
import * as rtv from 'rtvjs';
import { clusterModelTs } from '../typesets';
import { clusterStore } from '../store/ClusterStore';
import { logger } from '../util/logger';
import pkg from '../../package.json';
import { ipcEvents } from '../constants';
const {
Catalog: { KubernetesCluster },
} = Common;
// typeset for the capture() method
const captureTs = {
level: [rtv.STRING, { oneOf: Object.keys(console) }],
context: rtv.STRING,
message: rtv.STRING,
};
export const catalogSource = observable.array([]);
export class IpcMain extends Main.Ipc {
//
// HANDLERS
//
/**
* Adds specified clusters to the Catalog.
* @param {string} event Identifier provided by Lens.
* @param {Array<ClusterModel>} models Cluster models to use to add clusters to the Catalog
* as new `Common.Catalog.KubernetesCluster` objects.
*/
onAddClusters = (event, models) => {
this.addClusters(models);
this.capture(
'log',
'onAddClusters()',
`Added ${
models.length
} clusters to catalog and clusterStore, clusters=[${models
.map((model) => model.metadata.uid)
.join(', ')}]`
);
this.broadcast(
ipcEvents.broadcast.CLUSTERS_ADDED,
models.map((model) => model.metadata.uid)
);
};
/**
* Removes a cluster from the Catalog without removing the cluster's kubeConfig from disk.
* @param {string} event Identifier provided by Lens.
* @param {string} clusterId ID of the cluster to remove.
*/
onRemoveCluster = (event, clusterId) => {
DEV_ENV && rtv.verify({ clusterId }, { clusterId: rtv.STRING });
if (this.removeCluster(clusterId)) {
this.capture(
'log',
'onRemoveCluster()',
`Removed cluster ${clusterId} from catalog and clusterStore, kubeConfig file untouched`
);
this.broadcast(ipcEvents.broadcast.CLUSTERS_REMOVED, [clusterId]);
}
};
/**
* [ASYNC] Removes a cluster from the Catalog AND removes the cluster's kubeConfig from disk.
* @param {string} event Identifier provided by Lens.
* @param {string} clusterId ID of the cluster to delete.
*/
onDeleteCluster = async (event, clusterId) => {
DEV_ENV && rtv.verify({ clusterId }, { clusterId: rtv.STRING });
const model = clusterStore.models.find((m) => m.metadata.uid === clusterId);
let removed = this.removeCluster(clusterId);
if (model) {
try {
const stats = await fs.stat(model.spec.kubeconfigPath);
if (stats.isFile()) {
await fs.unlink(model.spec.kubeconfigPath);
this.capture(
'log',
'onDeleteCluster()',
`Cluster kubeConfig file deleted, clusterId=${clusterId}, path=${model.spec.kubeconfigPath}`
);
removed = true;
}
} catch (err) {
this.capture(
'error',
'onDeleteCluster()',
`Failed to delete kubeConfig file, clusterId=${clusterId}, error="${err.message}"`
);
}
}
if (removed) {
this.capture(
'log',
'onDeleteCluster()',
`Removed cluster ${clusterId} from catalog and clusterStore, kubeConfig file deleted`
);
this.broadcast(ipcEvents.broadcast.CLUSTERS_REMOVED, [clusterId]);
}
};
//
// SINGLETON
//
constructor(extension) {
super(extension);
extension.addCatalogSource(pkg.name, catalogSource);
this.restoreClusters();
this.handle(ipcEvents.invoke.ADD_CLUSTERS, this.onAddClusters);
this.handle(ipcEvents.invoke.REMOVE_CLUSTER, this.onRemoveCluster);
this.handle(ipcEvents.invoke.DELETE_CLUSTER, this.onDeleteCluster);
}
/**
* Logs a message to the `logger` and broadcasts it to the Renderer.
* @param {string} level Logger/console method, e.g. 'log' or 'warn'.
* @param {string} context Identifies where the message came from, e.g. 'methodName()'.
* The prefix "IpcMain." is added to this string.
* @param {string} message Log message.
* @param {...any} rest Anything else to pass to the Logger to be printed as data
* or parsed with '%s' placeholders in the message (same as `console` API).
*/
capture(level, context, message, ...rest) {
DEV_ENV && rtv.verify({ level, context, message }, captureTs);
const params = [`IpcMain.${context}`, message, ...rest];
logger[level](...params);
this.broadcast(ipcEvents.broadcast.LOGGER, level, ...params);
}
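  // Illustrative call (hypothetical values): logs via `logger` and mirrors the
  // entry to the Renderer over IPC; '%s' placeholders work as with `console`:
  //
  //   this.capture('warn', 'addClusters()', 'skipping duplicate cluster, uid=%s', uid);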
/**
* Restores clusters from the `clusterStore` to the `catalogSource`.
*/
restoreClusters() {
this.addClusters(clusterStore.models, false);
this.capture(
'log',
'restoreClusters()',
`Restored ${
clusterStore.models.length
} clusters from clusterStore into catalog, clusters=[${clusterStore.models
.map((model) => model.metadata.uid)
.join(', ')}]`
);
}
/**
* Adds clusters to the `catalogSource`.
* @param {Array<ClusterModel>} models Cluster models to add.
* @param {boolean} [persist] If false, models will not be persisted to the store.
*/
addClusters(models, persist = true) {
DEV_ENV && rtv.verify({ models }, { models: [[clusterModelTs]] });
models.forEach((model) => {
// officially add to Lens
this.capture(
'log',
'addClusters()',
`adding cluster to catalog, clusterId=${model.metadata.uid}, name=${model.metadata.name}, namespace=${model.metadata.namespace}`
);
catalogSource.push(new KubernetesCluster(model));
if (persist) {
clusterStore.models.push(model);
}
});
}
/**
* Removes the cluster from the `catalogSource` and `clusterStore`, if found.
* @param {string} clusterId ID of the cluster to remove.
* @returns {boolean} True if the cluster was removed from at least one of the two;
* false if it didn't exist anywhere.
*/
removeCluster(clusterId) {
let removed = false;
const clusterIdx = catalogSource.findIndex(
(kc) => kc.metadata.uid === clusterId
);
if (clusterIdx >= 0) {
catalogSource.splice(clusterIdx, 1);
removed = true;
this.capture(
'log',
'removeCluster()',
`Removed cluster from catalog, clusterId=${clusterId}`
);
}
const modelIdx = clusterStore.models.findIndex(
(model) => model.metadata.uid === clusterId
);
if (modelIdx >= 0) {
clusterStore.models.splice(modelIdx, 1);
removed = true;
this.capture(
'log',
'removeCluster()',
`Removed cluster from clusterStore, clusterId=${clusterId}`
);
}
return removed;
}
}
<|start_filename|>src/renderer/components/styles.js<|end_filename|>
const grid = 4; // (px) grid unit
// Layout is not theme-specific, should we ever end up with light and dark themes
export const layout = {
grid,
pad: grid * 2, // padding or margin
gap: grid * 4, // matches flex.box 'gaps' that the Lens Add Cluster Wizard uses
};
/**
* Styles that define a flex container, column-oriented, with consistent gaps
* between its _immediate_ children.
* @param {number} [gap] Space, in pixels, between immediate children.
* @returns {Object} Emotion styles object.
*/
export const mixinFlexColumnGaps = function (gap = layout.gap) {
return {
display: 'flex',
flexDirection: 'column',
// separate each immediate child (i.e. sections) by a common gap except for the last one
'> *': {
marginBottom: gap,
},
'> *:last-child': {
marginBottom: 0,
},
};
};
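// Illustrative usage (hypothetical, not part of this module): a column
// container whose immediate children are separated by the default gap.
//
//   const Column = styled.div(() => ({
//     ...mixinFlexColumnGaps(),
//     padding: layout.pad,
//   }));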
/**
* Styles for columns in a Flex column layout.
*/
export const mixinColumnStyles = function () {
return {
// as flex children, grow/shrink evenly
flex: 1,
// as flex containers
...mixinFlexColumnGaps(layout.grid * 6),
borderRadius: layout.grid,
backgroundColor: 'var(--contentColor)',
marginRight: layout.gap,
padding: layout.gap,
overflow: 'auto',
};
};
/**
* Common styles for pages.
*/
export const mixinPageStyles = function () {
return {
padding: layout.gap,
backgroundColor: 'var(--mainBackground)',
// style all <code> elements herein
code: {
// TODO: remove once https://github.com/lensapp/lens/issues/1683 is fixed
// TRACKING: https://github.com/Mirantis/lens-extension-cc/issues/27
fontSize: 'calc(var(--font-size) * .9)',
},
};
};
<|start_filename|>webpack.config.js<|end_filename|>
//
// Webpack build configuration
//
// Environment Variables:
// - TARGET: Either 'development' or 'production' (default).
// - DEV_UNSAFE_NO_CERT: Set to 'thisisunsafe' to disable TLS certificate verification on MCC instances
// - FEAT_CLUSTER_PAGE_ENABLED: Set to 1 to enable the Cluster Page feature. Disabled by default.
//
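// Example invocation (illustrative; assumes webpack runs via npx or an npm script):
//   TARGET=development FEAT_CLUSTER_PAGE_ENABLED=1 npx webpack
//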
const path = require('path');
const babelConfig = require('./babel.config');
const { DefinePlugin } = require('webpack');
const buildTarget = process.env.TARGET || 'production';
const loaders = [
{
test: /\.m?js$/,
exclude: /node_modules/,
use: {
loader: 'babel-loader',
options: babelConfig,
},
},
{
test: /\.tsx?$/,
use: 'ts-loader',
exclude: /node_modules/,
},
];
const plugins = [
new DefinePlugin({
DEV_ENV: JSON.stringify(buildTarget !== 'production'),
DEV_UNSAFE_NO_CERT: JSON.stringify(
buildTarget !== 'production' &&
process.env.DEV_UNSAFE_NO_CERT === 'thisisunsafe'
),
FEAT_CLUSTER_PAGE_ENABLED: JSON.stringify(
!!Number(process.env.FEAT_CLUSTER_PAGE_ENABLED)
),
'process.env.TARGET': JSON.stringify(buildTarget),
}),
];
module.exports = [
{
entry: './src/main/main.ts',
context: __dirname,
target: 'electron-main',
mode: buildTarget,
    devtool:
      buildTarget !== 'production' ? 'eval-source-map' : undefined,
module: {
rules: [...loaders],
},
externals: [
{
'@k8slens/extensions': 'var global.LensExtensions',
mobx: 'var global.Mobx',
react: 'var global.React',
},
],
plugins,
resolve: {
extensions: ['.tsx', '.ts', '.js'],
},
output: {
libraryTarget: 'commonjs2',
filename: 'main.js',
path: path.resolve(__dirname, 'dist'),
},
},
{
entry: './src/renderer/renderer.tsx',
context: __dirname,
target: 'electron-renderer',
mode: buildTarget,
    devtool:
      buildTarget !== 'production' ? 'eval-source-map' : undefined,
module: {
rules: [...loaders],
},
externals: [
{
'@k8slens/extensions': 'var global.LensExtensions',
mobx: 'var global.Mobx',
react: 'var global.React',
},
],
plugins,
resolve: {
extensions: ['.tsx', '.ts', '.js'],
// exclude any 'browser' targets, like node-fetch which will default to
// native (browser) fetch and cause CORS issues when connecting with MCC
mainFields: ['module', 'main'],
},
output: {
libraryTarget: 'commonjs2',
globalObject: 'this',
filename: 'renderer.js',
path: path.resolve(__dirname, 'dist'),
},
node: {
__dirname: false,
__filename: false,
},
},
];
| Mirantis/lens-extension-cc |
<|start_filename|>docs/reference/qc_examples.html<|end_filename|>
<!-- Generated by pkgdown: do not edit by hand -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>QC metrics from four 10x Genomics single cell gene expression datasets. — qc_examples • DropletQC</title>
<!-- jquery -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.4.1/jquery.min.js" integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script>
<!-- Bootstrap -->
<link href="https://cdnjs.cloudflare.com/ajax/libs/bootswatch/3.4.0/flatly/bootstrap.min.css" rel="stylesheet" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.4.1/js/bootstrap.min.js" integrity="sha256-nuL8/2cJ5NDSSwnKD8VqreErSWHtnEP9E7AySL+1ev4=" crossorigin="anonymous"></script>
<!-- bootstrap-toc -->
<link rel="stylesheet" href="../bootstrap-toc.css">
<script src="../bootstrap-toc.js"></script>
<!-- Font Awesome icons -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/all.min.css" integrity="sha256-mmgLkCYLUQbXn0B1SRqzHar6dCnv9oZFPEC1g1cwlkk=" crossorigin="anonymous" />
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/v4-shims.min.css" integrity="sha256-wZjR52fzng1pJHwx4aV2AO3yyTOXrcDW7jBpJtTwVxw=" crossorigin="anonymous" />
<!-- clipboard.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/clipboard.js/2.0.6/clipboard.min.js" integrity="sha256-inc5kl9MA1hkeYUt+EC3BhlIgyp/2jDIyBLS6k3UxPI=" crossorigin="anonymous"></script>
<!-- headroom.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/headroom.min.js" integrity="sha256-AsUX4SJE1+yuDu5+mAVzJbuYNPHj/WroHuZ8Ir/CkE0=" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/jQuery.headroom.min.js" integrity="sha256-ZX/yNShbjqsohH1k95liqY9Gd8uOiE1S4vZc+9KQ1K4=" crossorigin="anonymous"></script>
<!-- pkgdown -->
<link href="../pkgdown.css" rel="stylesheet">
<script src="../pkgdown.js"></script>
<meta property="og:title" content="QC metrics from four 10x Genomics single cell gene expression datasets. — qc_examples" />
<meta property="og:description" content="This dataset contains a collection of summary and QC metrics from four
publicly available 10x Genomics single cell gene expression datasets:" />
<!-- mathjax -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js" integrity="sha256-nvJJv9wWKEm88qvoQl9ekL2J+k/RWIsaSScxxlsrv8k=" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/config/TeX-AMS-MML_HTMLorMML.js" integrity="sha256-84DKXVJXs0/F8OTMzX4UR909+jtl4G7SPypPavF+GfA=" crossorigin="anonymous"></script>
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body data-spy="scroll" data-target="#toc">
<div class="container template-reference-topic">
<header>
<div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<span class="navbar-brand">
<a class="navbar-link" href="../index.html">DropletQC</a>
<span class="version label label-default" data-toggle="tooltip" data-placement="bottom" title="Released version">0.0.0.9000</span>
</span>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li>
<a href="../index.html">
<span class="fas fa-home fa-lg"></span>
</a>
</li>
<li>
<a href="../articles/DropletQC.html">Get started</a>
</li>
<li>
<a href="../reference/index.html">Reference</a>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
</ul>
</div><!--/.nav-collapse -->
</div><!--/.container -->
</div><!--/.navbar -->
</header>
<div class="row">
<div class="col-md-9 contents">
<div class="page-header">
<h1>QC metrics from four 10x Genomics single cell gene expression datasets.</h1>
<div class="hidden name"><code>qc_examples.Rd</code></div>
</div>
<div class="ref-description">
<p>This dataset contains a collection of summary and QC metrics from four
publicly available 10x Genomics single cell gene expression datasets:</p>
</div>
<pre class="usage"><span class='va'>qc_examples</span></pre>
<h2 class="hasAnchor" id="format"><a class="anchor" href="#format"></a>Format</h2>
<p>A data frame with 27,597 rows and 15 variables:</p><dl>
<dt>sample</dt><dd><p>sample name, one of four values; GBM (glioblastoma), HL
(Hodgkin's lymphoma), MB (mouse brain) and PBMC (peripheral blood
mononuclear cells)</p></dd>
<dt>cell_barcode</dt><dd><p>the 16 nucleotide cell barcode e.g. AAACCCAAGGCGATAC-1</p></dd>
<dt>umap_1</dt><dd><p>UMAP coordinates 1, for visualisation</p></dd>
<dt>umap_2</dt><dd><p>UMAP coordinates 2, for visualisation</p></dd>
<dt>seurat_clusters</dt><dd><p>cell clusters identified with the Louvain algorithm
using default parameters implemented in the Seurat package</p></dd>
<dt>cell_type</dt><dd><p>a very rough cell type annotation</p></dd>
<dt>umi_count</dt><dd><p>the number of UMIs detected</p></dd>
<dt>log10_umi_count</dt><dd><p>log10 of the number of UMIs detected</p></dd>
<dt>percent_mt</dt><dd><p>percentage of UMIs mapping to mitochondrial genes</p></dd>
<dt>empty_drops_log_prob</dt><dd><p>the log-probability of observing the cell's
count vector under the null model - from DropletUtils::emptyDrops output</p></dd>
<dt>empty_drops_p_value</dt><dd><p>the Monte Carlo p-value against the null model -
from DropletUtils::emptyDrops output</p></dd>
<dt>empty_drops_fdr</dt><dd><p>FDR values returned from DropletUtils::emptyDrops
output</p></dd>
<dt>nuclear_fraction_droplet_qc</dt><dd><p>nuclear fraction metric calculated with
DropletQC</p></dd>
<dt>nuclear_fraction_velocyto</dt><dd><p>nuclear fraction metric calculated using
the output from velocyto</p></dd>
<dt>flag</dt><dd><p>assigned cell status, taking one of three values; cell,
damaged_cell, empty_droplet</p></dd>
</dl>
<h2 class="hasAnchor" id="source"><a class="anchor" href="#source"></a>Source</h2>
<p><a href='https://support.10xgenomics.com/single-cell-gene-expression/datasets/'>https://support.10xgenomics.com/single-cell-gene-expression/datasets/</a></p>
<h2 class="hasAnchor" id="details"><a class="anchor" href="#details"></a>Details</h2>
<ol>
<li><p>Human Glioblastoma Multiforme: 3’v3 Whole Transcriptome Analysis</p></li>
<li><p>Hodgkin's Lymphoma, Dissociated Tumor: Whole Transcriptome Analysis</p></li>
<li><p>10k Mouse E18 Combined Cortex, Hippocampus and Subventricular Zone Cells,
Dual Indexed</p></li>
<li><p>PBMCs from a Healthy Donor: Whole Transcriptome Analysis</p></li>
</ol>
<p>To be included in the dataset, cells were required to pass
DropletUtils::emptyDrops using the default FDR threshold of 1 and to have a
mitochondrial gene content of at most 15%. The included variables are as
follows:</p>
</div>
<div class="col-md-3 hidden-xs hidden-sm" id="pkgdown-sidebar">
<nav id="toc" data-toggle="toc" class="sticky-top">
<h2 data-toc-skip>Contents</h2>
</nav>
</div>
</div>
<footer>
<div class="copyright">
<p>Developed by <NAME>.</p>
</div>
<div class="pkgdown">
<p>Site built with <a href="https://pkgdown.r-lib.org/">pkgdown</a> 1.6.1.</p>
</div>
</footer>
</div>
</body>
</html>
<|start_filename|>docs/reference/identify_damaged_cells.html<|end_filename|>
<!-- Generated by pkgdown: do not edit by hand -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Identify damaged cells — identify_damaged_cells • DropletQC</title>
<!-- jquery -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.4.1/jquery.min.js" integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script>
<!-- Bootstrap -->
<link href="https://cdnjs.cloudflare.com/ajax/libs/bootswatch/3.4.0/flatly/bootstrap.min.css" rel="stylesheet" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.4.1/js/bootstrap.min.js" integrity="sha256-nuL8/2cJ5NDSSwnKD8VqreErSWHtnEP9E7AySL+1ev4=" crossorigin="anonymous"></script>
<!-- bootstrap-toc -->
<link rel="stylesheet" href="../bootstrap-toc.css">
<script src="../bootstrap-toc.js"></script>
<!-- Font Awesome icons -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/all.min.css" integrity="sha256-mmgLkCYLUQbXn0B1SRqzHar6dCnv9oZFPEC1g1cwlkk=" crossorigin="anonymous" />
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/v4-shims.min.css" integrity="sha256-wZjR52fzng1pJHwx4aV2AO3yyTOXrcDW7jBpJtTwVxw=" crossorigin="anonymous" />
<!-- clipboard.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/clipboard.js/2.0.6/clipboard.min.js" integrity="sha256-inc5kl9MA1hkeYUt+EC3BhlIgyp/2jDIyBLS6k3UxPI=" crossorigin="anonymous"></script>
<!-- headroom.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/headroom.min.js" integrity="sha256-AsUX4SJE1+yuDu5+mAVzJbuYNPHj/WroHuZ8Ir/CkE0=" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/jQuery.headroom.min.js" integrity="sha256-ZX/yNShbjqsohH1k95liqY9Gd8uOiE1S4vZc+9KQ1K4=" crossorigin="anonymous"></script>
<!-- pkgdown -->
<link href="../pkgdown.css" rel="stylesheet">
<script src="../pkgdown.js"></script>
<meta property="og:title" content="Identify damaged cells — identify_damaged_cells" />
<meta property="og:description" content="This function uses a combination of the cell UMI counts and the
nuclear fraction score to assign each cell one of two values; "cell" or
"damaged_cell". This is based on the idea that damaged cells have a lower
UMI count and higher nuclear fraction than whole cells. The expected input
is a data frame with four columns. The first three columns should contain;
the nuclear fraction score, total UMIs and a character vector describing
each cell as "cell" or "empty_droplet". This is the format output by the
identify_empty_drops function. The fourth column should be a character
vector with user-assigned cell types. Internally, the provided data frame
is split by cell type and a Gaussian mixture model with a maximum of two
components is fit to the umi counts and nuclear fraction scores. The
parameters of the model are estimated using expectation maximisation (EM)
with the mclust package. The best model is selected using the Bayesian
Information Criterion (BIC). The two populations (cells and damaged cells)
are assumed to have equal variance (mclust model name "EEI")." />
<!-- mathjax -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js" integrity="sha256-nvJJv9wWKEm88qvoQl9ekL2J+k/RWIsaSScxxlsrv8k=" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/config/TeX-AMS-MML_HTMLorMML.js" integrity="sha256-84DKXVJXs0/F8OTMzX4UR909+jtl4G7SPypPavF+GfA=" crossorigin="anonymous"></script>
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body data-spy="scroll" data-target="#toc">
<div class="container template-reference-topic">
<header>
<div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<span class="navbar-brand">
<a class="navbar-link" href="../index.html">DropletQC</a>
<span class="version label label-default" data-toggle="tooltip" data-placement="bottom" title="Released version">0.0.0.9000</span>
</span>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li>
<a href="../index.html">
<span class="fas fa-home fa-lg"></span>
</a>
</li>
<li>
<a href="../articles/DropletQC.html">Get started</a>
</li>
<li>
<a href="../reference/index.html">Reference</a>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
</ul>
</div><!--/.nav-collapse -->
</div><!--/.container -->
</div><!--/.navbar -->
</header>
<div class="row">
<div class="col-md-9 contents">
<div class="page-header">
<h1>Identify damaged cells</h1>
<div class="hidden name"><code>identify_damaged_cells.Rd</code></div>
</div>
<div class="ref-description">
<p>This function uses a combination of the cell UMI counts and the
nuclear fraction score to assign each cell one of two values; "cell" or
"damaged_cell". This is based on the idea that damaged cells have a lower
UMI count and higher nuclear fraction than whole cells. The expected input
is a data frame with four columns. The first three columns should contain;
the nuclear fraction score, total UMIs and a character vector describing
each cell as "cell" or "empty_droplet". This is the format output by the
<code>identify_empty_drops</code> function. The fourth column should be a character
vector with user-assigned cell types. Internally, the provided data frame
is split by cell type and a Gaussian mixture model with a maximum of two
components is fit to the UMI counts and nuclear fraction scores. The
parameters of the model are estimated using expectation maximisation (EM)
with the <code>mclust</code> package. The best model is selected using the Bayesian
Information Criterion (BIC). The two populations (cells and damaged cells)
are assumed to have equal variance (mclust model name "EEI").</p>
</div>
<pre class="usage"><span class='fu'>identify_damaged_cells</span><span class='op'>(</span>
<span class='va'>nf_umi_ed_ct</span>,
nf_sep <span class='op'>=</span> <span class='fl'>0.15</span>,
umi_sep_perc <span class='op'>=</span> <span class='fl'>50</span>,
output_plots <span class='op'>=</span> <span class='cn'>FALSE</span>,
verbose <span class='op'>=</span> <span class='cn'>TRUE</span>
<span class='op'>)</span></pre>
<h2 class="hasAnchor" id="arguments"><a class="anchor" href="#arguments"></a>Arguments</h2>
<table class="ref-arguments">
<colgroup><col class="name" /><col class="desc" /></colgroup>
<tr>
<th>nf_umi_ed_ct</th>
<td><p>data frame, with four columns. The first three columns
should match the output from the <code>identify_empty_drops</code> function. The
fourth column should contain cell type names.</p></td>
</tr>
<tr>
<th>nf_sep</th>
<td><p>numeric, the minimum separation of the nuclear fraction score
required between the cell and damaged cell populations</p></td>
</tr>
<tr>
<th>umi_sep_perc</th>
<td><p>numeric, the minimum percentage by which the mean UMI count of the
damaged cell population must fall below that of the whole cell
population. For example, if the mean UMI of the distribution fit to the
whole cell population is 10,000 UMIs, the mean of the distribution fit to
the damaged cell population must be less than 7,000 UMIs if the umi_sep_perc
parameter is 30 (%)</p></td>
</tr>
<tr>
<th>output_plots</th>
<td><p>logical, whether or not to return plots</p></td>
</tr>
<tr>
<th>verbose</th>
<td><p>logical, whether to print updates and progress while fitting
with EM</p></td>
</tr>
</table>
<h2 class="hasAnchor" id="value"><a class="anchor" href="#value"></a>Value</h2>
<p>list, of length two. The first element in the list contains a data
frame with the same dimensions as the one passed to the <code>nf_umi_ed_ct</code> argument,
with "damaged_cell" now recorded in the third column. The second element is NULL
unless <code>output_plots</code>=TRUE. If requested, three plots are returned for each
cell type in a named list, combined using <code><a href='https://rpkgs.datanovia.com/ggpubr/reference/ggarrange.html'>ggpubr::ggarrange</a></code>. For each
cell type, the first plot illustrates the cell and damaged cell populations
(if any) in a plot of nuclear fraction vs log10(UMI counts). Damaged cells
are expected to be in the lower right portion of the plot (lower UMI counts
and higher nuclear fraction). The second and third plots show the model
fits to the nuclear fraction and UMI count distributions respectively.
Solid lines indicate the distribution mean, while dashed lines indicate the
positions of the thresholds controlled by the <code>nf_sep</code> and <code>umi_sep_perc</code>
parameters.</p>
<h2 class="hasAnchor" id="examples"><a class="anchor" href="#examples"></a>Examples</h2>
<pre class="examples"><div class='input'><span class='co'>#1</span>
<span class='fu'><a href='https://rdrr.io/r/utils/data.html'>data</a></span><span class='op'>(</span><span class='st'>"qc_examples"</span><span class='op'>)</span>
<span class='va'>gbm</span> <span class='op'><-</span> <span class='va'>qc_examples</span><span class='op'>[</span><span class='va'>qc_examples</span><span class='op'>$</span><span class='va'>sample</span><span class='op'>==</span><span class='st'>"MB"</span>,<span class='op'>]</span>
<span class='va'>gbm.ed</span> <span class='op'><-</span> <span class='va'>gbm</span><span class='op'>[</span>,<span class='fu'><a href='https://rdrr.io/r/base/c.html'>c</a></span><span class='op'>(</span><span class='st'>"nuclear_fraction_droplet_qc"</span>,<span class='st'>"umi_count"</span><span class='op'>)</span><span class='op'>]</span>
<span class='va'>gbm.ed</span> <span class='op'><-</span> <span class='fu'><a href='identify_empty_drops.html'>identify_empty_drops</a></span><span class='op'>(</span>nf_umi <span class='op'>=</span> <span class='va'>gbm.ed</span><span class='op'>)</span>
<span class='va'>gbm.ed</span><span class='op'>$</span><span class='va'>cell_type</span> <span class='op'><-</span> <span class='va'>gbm</span><span class='op'>$</span><span class='va'>cell_type</span>
<span class='va'>gbm.ed.dc</span> <span class='op'><-</span> <span class='fu'>identify_damaged_cells</span><span class='op'>(</span><span class='va'>gbm.ed</span>, verbose<span class='op'>=</span><span class='cn'>FALSE</span><span class='op'>)</span>
<span class='va'>gbm.ed.dc</span> <span class='op'><-</span> <span class='va'>gbm.ed.dc</span><span class='op'>[[</span><span class='fl'>1</span><span class='op'>]</span><span class='op'>]</span>
<span class='fu'><a href='https://rdrr.io/r/utils/head.html'>head</a></span><span class='op'>(</span><span class='va'>gbm.ed.dc</span><span class='op'>)</span>
</div><div class='output co'>#> nuclear_fraction_droplet_qc umi_count cell_status
#> AAACCCACAAGAATAC-1 0.4176170 12557 cell
#> AAACCCACAATAAGGT-1 0.3744720 10958 cell
#> AAACCCACAGCCCACA-1 0.6492798 6095 cell
#> AAACCCACAGTAACGG-1 0.3900973 12802 cell
#> AAACCCACATAAGATG-1 0.1312698 13672 cell
#> AAACCCACATAATCGC-1 0.4443030 26653 cell
#> cell_type
#> AAACCCACAAGAATAC-1 neuron_unresolved_2
#> AAACCCACAATAAGGT-1 migrating_interneuron
#> AAACCCACAGCCCACA-1 neuron_hippocampus
#> AAACCCACAGTAACGG-1 migrating_interneuron
#> AAACCCACATAAGATG-1 neuron_unresolved_1
#> AAACCCACATAATCGC-1 migrating_interneuron</div><div class='input'><span class='fu'><a href='https://rdrr.io/r/base/table.html'>table</a></span><span class='op'>(</span><span class='va'>gbm.ed.dc</span><span class='op'>$</span><span class='va'>cell_status</span><span class='op'>)</span>
</div><div class='output co'>#>
#> cell damaged_cell empty_droplet
#> 7389 1349 920 </div><div class='input'>
</div></pre>
</div>
<div class="col-md-3 hidden-xs hidden-sm" id="pkgdown-sidebar">
<nav id="toc" data-toggle="toc" class="sticky-top">
<h2 data-toc-skip>Contents</h2>
</nav>
</div>
</div>
<footer>
<div class="copyright">
<p>Developed by <NAME>.</p>
</div>
<div class="pkgdown">
<p>Site built with <a href="https://pkgdown.r-lib.org/">pkgdown</a> 1.6.1.</p>
</div>
</footer>
</div>
</body>
</html>
<|start_filename|>docs/reference/identify_empty_drops.html<|end_filename|>
<!-- Generated by pkgdown: do not edit by hand -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Identify empty droplets — identify_empty_drops • DropletQC</title>
<!-- jquery -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.4.1/jquery.min.js" integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script>
<!-- Bootstrap -->
<link href="https://cdnjs.cloudflare.com/ajax/libs/bootswatch/3.4.0/flatly/bootstrap.min.css" rel="stylesheet" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.4.1/js/bootstrap.min.js" integrity="sha256-nuL8/2cJ5NDSSwnKD8VqreErSWHtnEP9E7AySL+1ev4=" crossorigin="anonymous"></script>
<!-- bootstrap-toc -->
<link rel="stylesheet" href="../bootstrap-toc.css">
<script src="../bootstrap-toc.js"></script>
<!-- Font Awesome icons -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/all.min.css" integrity="sha256-mmgLkCYLUQbXn0B1SRqzHar6dCnv9oZFPEC1g1cwlkk=" crossorigin="anonymous" />
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/v4-shims.min.css" integrity="sha256-wZjR52fzng1pJHwx4aV2AO3yyTOXrcDW7jBpJtTwVxw=" crossorigin="anonymous" />
<!-- clipboard.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/clipboard.js/2.0.6/clipboard.min.js" integrity="sha256-inc5kl9MA1hkeYUt+EC3BhlIgyp/2jDIyBLS6k3UxPI=" crossorigin="anonymous"></script>
<!-- headroom.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/headroom.min.js" integrity="sha256-AsUX4SJE1+yuDu5+mAVzJbuYNPHj/WroHuZ8Ir/CkE0=" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/jQuery.headroom.min.js" integrity="sha256-ZX/yNShbjqsohH1k95liqY9Gd8uOiE1S4vZc+9KQ1K4=" crossorigin="anonymous"></script>
<!-- pkgdown -->
<link href="../pkgdown.css" rel="stylesheet">
<script src="../pkgdown.js"></script>
<meta property="og:title" content="Identify empty droplets — identify_empty_drops" />
<meta property="og:description" content="This function is used to identify a suitable nuclear fraction
cut-off point to guide the identification of empty droplets. To do this it
calculates the kernel density estimate of the input nuclear fraction scores
and identifies the trough after the first peak, which is assumed to
represent the population of empty droplets." />
<!-- mathjax -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js" integrity="sha256-nvJJv9wWKEm88qvoQl9ekL2J+k/RWIsaSScxxlsrv8k=" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/config/TeX-AMS-MML_HTMLorMML.js" integrity="sha256-84DKXVJXs0/F8OTMzX4UR909+jtl4G7SPypPavF+GfA=" crossorigin="anonymous"></script>
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body data-spy="scroll" data-target="#toc">
<div class="container template-reference-topic">
<header>
<div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<span class="navbar-brand">
<a class="navbar-link" href="../index.html">DropletQC</a>
<span class="version label label-default" data-toggle="tooltip" data-placement="bottom" title="Released version">0.0.0.9000</span>
</span>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li>
<a href="../index.html">
<span class="fas fa-home fa-lg"></span>
</a>
</li>
<li>
<a href="../articles/DropletQC.html">Get started</a>
</li>
<li>
<a href="../reference/index.html">Reference</a>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
</ul>
</div><!--/.nav-collapse -->
</div><!--/.container -->
</div><!--/.navbar -->
</header>
<div class="row">
<div class="col-md-9 contents">
<div class="page-header">
<h1>Identify empty droplets</h1>
<div class="hidden name"><code>identify_empty_drops.Rd</code></div>
</div>
<div class="ref-description">
<p>This function is used to identify a suitable nuclear fraction
cut-off point to guide the identification of empty droplets. To do this it
calculates the kernel density estimate of the input nuclear fraction scores
and identifies the trough after the first peak, which is assumed to
represent the population of empty droplets.</p>
</div>
<pre class="usage"><span class='fu'>identify_empty_drops</span><span class='op'>(</span>
<span class='va'>nf_umi</span>,
nf_rescue <span class='op'>=</span> <span class='fl'>0.05</span>,
umi_rescue <span class='op'>=</span> <span class='fl'>1000</span>,
include_plot <span class='op'>=</span> <span class='cn'>FALSE</span>,
plot_name <span class='op'>=</span> <span class='cn'>NULL</span>,
plot_path <span class='op'>=</span> <span class='cn'>NULL</span>,
plot_width <span class='op'>=</span> <span class='fl'>18</span>,
plot_height <span class='op'>=</span> <span class='fl'>13</span>,
pdf_png <span class='op'>=</span> <span class='st'>"png"</span>
<span class='op'>)</span></pre>
<h2 class="hasAnchor" id="arguments"><a class="anchor" href="#arguments"></a>Arguments</h2>
<table class="ref-arguments">
<colgroup><col class="name" /><col class="desc" /></colgroup>
<tr>
<th>nf_umi</th>
<td><p>data frame, containing two columns; the nuclear fraction
estimates in the first column and the total UMI count for each barcode in
the second column</p></td>
</tr>
<tr>
<th>nf_rescue</th>
<td><p>numeric, a rescue parameter defining a minimum nuclear
fraction score between zero and one. This is used in combination with
<code>umi_rescue</code> to identify cells that were misidentified as empty droplets</p></td>
</tr>
<tr>
<th>umi_rescue</th>
<td><p>integer, a rescue parameter defining a minimum UMI count.
This is used in combination with <code>nf_rescue</code> to identify cells that were
misidentified as empty droplets</p></td>
</tr>
<tr>
<th>include_plot</th>
<td><p>logical, whether or not to produce a plot illustrating
how the nuclear fraction threshold was identified and which barcodes have
been called as empty droplets. In the plot of nuclear fraction vs log10(UMI
counts), empty droplets are expected to occupy the lower left corner of the
plot.</p></td>
</tr>
<tr>
<th>plot_name</th>
<td><p>character, if provided a plot will be saved with the
provided name</p></td>
</tr>
<tr>
<th>plot_path</th>
<td><p>character, if provided a plot will be saved to the provided
path</p></td>
</tr>
<tr>
<th>plot_width</th>
<td><p>numeric, plot width in cm</p></td>
</tr>
<tr>
<th>plot_height</th>
<td><p>numeric, plot height in cm</p></td>
</tr>
<tr>
<th>pdf_png</th>
<td><p>character, either "png" or "pdf"</p></td>
</tr>
</table>
<h2 class="hasAnchor" id="value"><a class="anchor" href="#value"></a>Value</h2>
<p>data frame, the original data frame is returned plus an additional
column identifying each barcode as a "cell" or "empty_droplet"</p>
<h2 class="hasAnchor" id="examples"><a class="anchor" href="#examples"></a>Examples</h2>
<pre class="examples"><div class='input'><span class='fu'><a href='https://rdrr.io/r/utils/data.html'>data</a></span><span class='op'>(</span><span class='st'>"qc_examples"</span><span class='op'>)</span>
<span class='va'>gbm</span> <span class='op'><-</span> <span class='va'>qc_examples</span><span class='op'>[</span><span class='va'>qc_examples</span><span class='op'>$</span><span class='va'>sample</span><span class='op'>==</span><span class='st'>"GBM"</span>,<span class='op'>]</span>
<span class='va'>gbm.ed</span> <span class='op'><-</span> <span class='va'>gbm</span><span class='op'>[</span>,<span class='fu'><a href='https://rdrr.io/r/base/c.html'>c</a></span><span class='op'>(</span><span class='st'>"nuclear_fraction_droplet_qc"</span>,<span class='st'>"umi_count"</span><span class='op'>)</span><span class='op'>]</span>
<span class='va'>gbm.ed</span> <span class='op'><-</span> <span class='fu'>identify_empty_drops</span><span class='op'>(</span>nf_umi <span class='op'>=</span> <span class='va'>gbm.ed</span><span class='op'>)</span>
<span class='fu'><a href='https://rdrr.io/r/utils/head.html'>head</a></span><span class='op'>(</span><span class='va'>gbm.ed</span><span class='op'>)</span>
</div><div class='output co'>#> nuclear_fraction_droplet_qc umi_count cell_status
#> AAACCCAAGGCGATAC-1 0.1947243 2226 cell
#> AAACCCAAGGCTGTAG-1 0.2766798 1063 cell
#> AAACCCACAAGTCCCG-1 0.1843824 17883 cell
#> AAACCCACAGATGCGA-1 0.2919902 8172 cell
#> AAACCCACAGGTGAGT-1 0.3295617 9057 cell
#> AAACCCAGTCTTGCGG-1 0.3795893 5612 cell</div><div class='input'><span class='fu'><a href='https://rdrr.io/r/base/table.html'>table</a></span><span class='op'>(</span><span class='va'>gbm.ed</span><span class='op'>$</span><span class='va'>cell_status</span><span class='op'>)</span>
</div><div class='output co'>#>
#> cell empty_droplet
#> 5296 220 </div><div class='input'>
</div></pre>
</div>
<div class="col-md-3 hidden-xs hidden-sm" id="pkgdown-sidebar">
<nav id="toc" data-toggle="toc" class="sticky-top">
<h2 data-toc-skip>Contents</h2>
</nav>
</div>
</div>
<footer>
<div class="copyright">
<p>Developed by <NAME>.</p>
</div>
<div class="pkgdown">
<p>Site built with <a href="https://pkgdown.r-lib.org/">pkgdown</a> 1.6.1.</p>
</div>
</footer>
</div>
</body>
</html>
| powellgenomicslab/DropletQC |
<|start_filename|>plugin/GeometricMean/src/GeometricMean.cpp<|end_filename|>
#include "CoreConcept.h"
#include "Exceptions.h"
#include "ScalarImp.h"
#include "GeometricMean.h"
ConstantSP geometricMean(const ConstantSP &x, const ConstantSP &placeholder) {
string syntax = "Usage: geometricMean::geometricMean(x). ";
    if (!x->isScalar() && !x->isVector())
        throw IllegalArgumentException("geometricMean::geometricMean", syntax + "x must be a number or a numeric vector.");
switch (x->getType()) {
case DT_CHAR: return computeGeometricMean<char>(x);
case DT_SHORT: return computeGeometricMean<short>(x);
case DT_INT: return computeGeometricMean<int>(x);
case DT_LONG: return computeGeometricMean<long long>(x);
case DT_DOUBLE: return computeGeometricMean<double>(x);
case DT_FLOAT: return computeGeometricMean<float>(x);
default: throw IllegalArgumentException("geometricMean::geometricMean", syntax + "x must be a number or a numeric vector.");
}
}
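// A minimal sketch of the computeGeometricMean template dispatched above -- an
// assumption for illustration only; the real definition is expected to live in
// GeometricMean.h, which is not shown here. Averaging logarithms and then
// exponentiating avoids overflowing a running product on long vectors:
//
//   template <typename T>
//   ConstantSP computeGeometricMean(const ConstantSP &x) {
//       INDEX size = x->size();
//       double logSum = 0.0;
//       for (INDEX i = 0; i < size; i++)
//           logSum += std::log(x->getDouble(i)); // requires strictly positive values
//       return new Double(size == 0 ? 0.0 : std::exp(logSum / size));
//   }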
<|start_filename|>plugin/LoadMyData/src/LoadMyData.h<|end_filename|>
#include "CoreConcept.h"
extern "C" ConstantSP extractMyDataSchema(const ConstantSP &placeholderA, const ConstantSP &placeholderB);
extern "C" ConstantSP loadMyData(Heap *heap, vector<ConstantSP> &args);
extern "C" ConstantSP loadMyDataEx(Heap *heap, vector<ConstantSP> &args);
extern "C" ConstantSP myDataDS(Heap *heap, vector<ConstantSP> &args);
struct MyData {
long long id;
char symbol[8];
char date[8];
double value;
};
int parseDate(char date[8]);
<|start_filename|>plugin/LoadMyData/src/LoadMyData.cpp<|end_filename|>
#include <fstream>
#include <iostream>
#include "ComputingModel.h"
#include "CoreConcept.h"
#include "DBFileIO.h"
#include "ScalarImp.h"
#include "SysIO.h"
#include "Util.h"
#include "LoadMyData.h"
ConstantSP extractMyDataSchema(const ConstantSP &placeholderA, const ConstantSP &placeholderB) {
ConstantSP colNames = Util::createVector(DT_STRING, 4);
ConstantSP colTypes = Util::createVector(DT_STRING, 4);
string names[] = {"id", "symbol", "date", "value"};
string types[] = {"LONG", "SYMBOL", "DATE", "DOUBLE"};
colNames->setString(0, 4, names);
colTypes->setString(0, 4, types);
vector<ConstantSP> schema = {colNames, colTypes};
vector<string> header = {"name", "type"};
return Util::createTable(header, schema);
}
ConstantSP loadMyData(Heap *heap, vector<ConstantSP> &args) {
string syntax = "Usage: loadMyData::loadMyData(path, [start], [length]). ";
ConstantSP path = args[0];
if (path->getCategory() != LITERAL || !path->isScalar())
throw IllegalArgumentException("loadMyData::loadMyData", syntax + "path must be a string.");
long long fileLength = Util::getFileLength(path->getString());
size_t bytesPerRow = sizeof(MyData);
long long rowNum = fileLength / bytesPerRow;
int start = 0;
if (args.size() >= 2 && args[1]->getCategory() == INTEGRAL)
start = args[1]->getInt();
int length = rowNum - start;
if (args.size() >= 3 && args[2]->getCategory() == INTEGRAL)
length = args[2]->getInt();
    if (start < 0 || start >= rowNum)
        throw IllegalArgumentException("loadMyData::loadMyData", syntax + "start must be a non-negative number smaller than the number of rows in the file.");
if (start + length > rowNum)
length = rowNum - start;
DataInputStreamSP inputStream = Util::createBlockFileInputStream(path->getString(), 0, fileLength, Util::BUF_SIZE, start * bytesPerRow, length * bytesPerRow);
char buf[Util::BUF_SIZE];
size_t actualLength;
long long idBuf[Util::BUF_SIZE];
string symbolBuf[Util::BUF_SIZE];
    int dateBuf[Util::BUF_SIZE]; // DolphinDB stores the DATE type as an int internally
double valueBuf[Util::BUF_SIZE];
VectorSP id = Util::createVector(DT_LONG, 0);
VectorSP symbol = Util::createVector(DT_SYMBOL, 0);
VectorSP date = Util::createVector(DT_DATE, 0);
VectorSP value = Util::createVector(DT_DOUBLE, 0);
int cursor = 0;
while (true) {
inputStream->readBytes(buf, Util::BUF_SIZE, actualLength);
if (actualLength <= 0)
break;
int actualRowNum = actualLength / bytesPerRow;
int currRowNum = 0;
for (char *row = buf; currRowNum < actualRowNum; currRowNum++, row += bytesPerRow) {
MyData *myData = (MyData *) row;
idBuf[cursor] = myData->id;
symbolBuf[cursor] = myData->symbol;
dateBuf[cursor] = parseDate(myData->date);
valueBuf[cursor] = myData->value;
cursor++;
            if (cursor == Util::BUF_SIZE || currRowNum == actualRowNum - 1) { // buffer is full, or this is the last row of the chunk
id->appendLong(idBuf, cursor);
symbol->appendString(symbolBuf, cursor);
date->appendInt(dateBuf, cursor);
value->appendDouble(valueBuf, cursor);
cursor = 0;
}
}
}
vector<ConstantSP> cols = {id, symbol, date, value};
vector<string> colNames = {"id", "symbol", "date", "value"};
return Util::createTable(colNames, cols);
}
ConstantSP loadMyDataEx(Heap *heap, vector<ConstantSP> &args) {
string syntax = "Usage: loadMyDataEx::loadMyDataEx(db, tableName, partitionColumns, path, [start], [length]). ";
ConstantSP db = args[0];
ConstantSP tableName = args[1];
ConstantSP partitionColumns = args[2];
ConstantSP path = args[3];
if (!db->isDatabase())
throw IllegalArgumentException("loadMyDataEx::loadMyDataEx", syntax + "db must be a database handle.");
if (tableName->getCategory() != LITERAL || !tableName->isScalar())
throw IllegalArgumentException("loadMyDataEx::loadMyDataEx", syntax + "tableName must be a string.");
if (partitionColumns->getCategory() != LITERAL || (!partitionColumns->isScalar() && !partitionColumns->isVector()))
throw IllegalArgumentException("loadMyDataEx::loadMyDataEx", syntax + "partitionColumns must be a string or a string vector.");
if (path->getCategory() != LITERAL || !path->isScalar())
throw IllegalArgumentException("loadMyDataEx::loadMyDataEx", syntax + "path must be a string.");
long long fileLength = Util::getFileLength(path->getString());
size_t bytesPerRow = sizeof(MyData);
long long rowNum = fileLength / bytesPerRow;
    // start and length are the optional 5th and 6th arguments of loadMyDataEx
    int start = 0;
    if (args.size() >= 5 && args[4]->getCategory() == INTEGRAL)
        start = args[4]->getInt();
    int length = rowNum - start;
    if (args.size() >= 6 && args[5]->getCategory() == INTEGRAL)
        length = args[5]->getInt();
    if (start < 0 || start >= rowNum)
        throw IllegalArgumentException("loadMyDataEx::loadMyDataEx", syntax + "start must be a non-negative number smaller than the number of rows in the file.");
if (start + length > rowNum)
length = rowNum - start;
string dbPath = ((SystemHandleSP) db)->getDatabaseDir();
vector<ConstantSP> existsTableArgs = {new String(dbPath), tableName};
    bool existsTable = heap->currentSession()->getFunctionDef("existsTable")->call(heap, existsTableArgs)->getBool(); // equivalent to existsTable(dbPath, tableName)
ConstantSP result;
    if (existsTable) { // the table exists: load it directly
vector<ConstantSP> loadTableArgs = {db, tableName};
        result = heap->currentSession()->getFunctionDef("loadTable")->call(heap, loadTableArgs); // equivalent to loadTable(db, tableName)
}
    else { // the table does not exist: create it
TableSP schema = extractMyDataSchema(new Void(), new Void());
ConstantSP dummyTable = DBFileIO::createEmptyTableFromSchema(schema);
vector<ConstantSP> createTableArgs = {db, dummyTable, tableName, partitionColumns};
        result = heap->currentSession()->getFunctionDef("createPartitionedTable")->call(heap, createTableArgs); // equivalent to createPartitionedTable(db, dummyTable, tableName, partitionColumns)
}
    int sizePerPartition = 16 * 1024 * 1024;
    int partitionNum = fileLength / sizePerPartition;
    if (partitionNum < 1)
        partitionNum = 1; // avoid dividing by zero when the file is smaller than one partition
vector<DistributedCallSP> tasks;
FunctionDefSP func = Util::createSystemFunction("loadMyData", loadMyData, 1, 3, false);
int partitionStart = start;
int partitionLength = length / partitionNum;
for (int i = 0; i < partitionNum; i++) {
if (i == partitionNum - 1)
partitionLength = length - partitionLength * i;
vector<ConstantSP> partitionArgs = {path, new Int(partitionStart), new Int(partitionLength)};
        ObjectSP call = Util::createRegularFunctionCall(func, partitionArgs); // will invoke loadMyData(path, partitionStart, partitionLength)
tasks.push_back(new DistributedCall(call, true));
partitionStart += partitionLength;
}
vector<ConstantSP> appendToResultArgs = {result};
    FunctionDefSP appendToResult = Util::createPartialFunction(heap->currentSession()->getFunctionDef("append!"), appendToResultArgs); // equivalent to append!{result}
vector<FunctionDefSP> functors = {appendToResult};
PipelineStageExecutor executor(functors, false);
executor.execute(heap, tasks);
return result;
}
ConstantSP myDataDS(Heap *heap, vector<ConstantSP> &args) {
string syntax = "Usage: myDataDS::myDataDS(path, [start], [length]). ";
ConstantSP path = args[0];
if (path->getCategory() != LITERAL || !path->isScalar())
throw IllegalArgumentException("myDataDS::myDataDS", syntax + "path must be a string.");
long long fileLength = Util::getFileLength(path->getString());
size_t bytesPerRow = sizeof(MyData);
long long rowNum = fileLength / bytesPerRow;
int start = 0;
if (args.size() >= 2 && args[1]->getCategory() == INTEGRAL)
start = args[1]->getInt();
int length = rowNum - start;
if (args.size() >= 3 && args[2]->getCategory() == INTEGRAL)
length = args[2]->getInt();
    if (start < 0 || start >= rowNum)
        throw IllegalArgumentException("myDataDS::myDataDS", syntax + "start must be a non-negative number smaller than the number of rows in the file.");
if (start + length > rowNum)
length = rowNum - start;
    int sizePerPartition = 16 * 1024 * 1024;
    int partitionNum = fileLength / sizePerPartition;
    if (partitionNum < 1)
        partitionNum = 1; // avoid dividing by zero when the file is smaller than one partition
int partitionStart = start;
int partitionLength = length / partitionNum;
FunctionDefSP func = Util::createSystemFunction("loadMyData", loadMyData, 1, 3, false);
ConstantSP dataSources = Util::createVector(DT_ANY, partitionNum);
for (int i = 0; i < partitionNum; i++) {
if (i == partitionNum - 1)
partitionLength = length - partitionLength * i;
vector<ConstantSP> partitionArgs = {path, new Int(partitionStart), new Int(partitionLength)};
        ObjectSP code = Util::createRegularFunctionCall(func, partitionArgs); // will invoke loadMyData(path, partitionStart, partitionLength)
dataSources->set(i, new DataSource(code));
}
return dataSources;
}
int parseDate(char date[8]) {
    // Assumes the field holds the ASCII digits "YYYYMMDD"; subtract '0' to
    // convert each character to its numeric value before combining.
    int year = (date[0] - '0') * 1000 + (date[1] - '0') * 100 + (date[2] - '0') * 10 + (date[3] - '0');
    int month = (date[4] - '0') * 10 + (date[5] - '0');
    int day = (date[6] - '0') * 10 + (date[7] - '0');
    return Util::countDays(year, month, day);
}
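// Hypothetical DolphinDB script usage of this plugin (illustrative only; the
// plugin description file and data file paths are placeholders):
//
//   loadPlugin("/path/to/PluginLoadMyData.txt")
//   t = loadMyData::loadMyData("/path/to/myData.bin")
//   db = database("dfs://myData", VALUE, 2019.01.01..2019.12.31)
//   pt = loadMyDataEx::loadMyDataEx(db, `pt, `date, "/path/to/myData.bin")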
<|start_filename|>benchmark/DolphinDB vs TimescaleDB/REPL.coffee<|end_filename|>
# ------------ Process the DolphinDB test results
# Copy the test results
text = new Text paste()
text.remove_empty_lines().replace /Time elapsed: (\d+)\.\d+ ms/, '$1'
copy text.to_string()
text.map (l)-> l.replace(/Time elapsed: (\d+)\.\d+ ms/, '$1').toNumber()
# ------------ Process the disk-usage analysis results
# Copy the test results
text = new Text(paste())
text.replace ' MB', ''
text.remove_empty_lines()
sum = 0
text.each (l)-> sum += l.toNumber()
sum
# 5107 total -> 7430 total after create index
# 844 index -> 3167 index after create index
# data size (total - index) is essentially unchanged by creating the index
5107 - 844 == 4263
7430 - 3167 == 4263
# ------------ Load the test results
result0 = Text.load('D:/0/DB/testresult_timescaledb_small.txt')
result1 = Text.load('D:/0/DB/testresult_dolphindb_small.txt')
result0 = Text.load('D:/0/DB/testresult_timescaledb_big.txt')
result1 = Text.load('D:/0/DB/testresult_dolphindb_big.txt')
result0.remove_last_empty_line().map (l)-> l.toNumber()
result1.remove_last_empty_line().map (l)-> l.toNumber()
# ------------ Generate the comparison table
# Copy all the test cases
test_text = paste()
tests = test_text.replace(/\r\n/g, '\n').split('\n\n\n').map (x)-> x.trim()
tests.each (test, i)->
log '| ' +
pad((i+1) + '. ', 4) + test.split('\n')[0].split('. ')[1] + ' | ' +
result1[i] + ' | ' +
result0[i] + ' | ' +
Math.round(result0[i] / result1[i]) + ' | ' +
(result0[i] - result1[i]) + ' | '
# ------------ Compute the total time from "explain analyze" output
text = paste()
[_, a, b] = text.match /.*?(\d+)\.[\s\S]*?(\d+)\..*/
a.toNumber() + b.toNumber()
<|start_filename|>benchmark/DolphinDB vs InfluxDB/convert_small.coffee<|end_filename|>
# ------------------------------------ InfluxDB test script
# ------------ readings data definition
cols = ["time", "device_id", "battery_level", "battery_status", "battery_temperature", "bssid", "cpu_avg_1min", "cpu_avg_5min", "cpu_avg_15min", "mem_free", "mem_used", "rssi", "ssid"]
tags = ["device_id", "battery_status", "bssid", "ssid"]
fields = ["battery_level", "battery_temperature", "cpu_avg_1min", "cpu_avg_5min", "cpu_avg_15min", "mem_free", "mem_used", "rssi"]
# ------------ 20 rows of data
rs = fs.createReadStream 'D:/1/comp/timescaledb/readings.csv',
encoding: 'UTF-8'
ws = fs.createWriteStream 'D:/readings_small.txt',
encoding: 'UTF-8'
# ------------ all rows of data
rs = fs.createReadStream 'D:/devices/devices_big_readings.csv',
encoding: 'UTF-8'
ws = fs.createWriteStream 'D:/readings.txt',
encoding: 'UTF-8'
# ------------ small data, in-memory read/write
csv = read('D:/1/comp/timescaledb/readings.csv')
lines = csv.split_lines()
write('D:/readings.txt', text)
# ------------ CSV -> Line Protocol
map_line= (l)->
o = {}
for col, i in l.split(',')
o[cols[i]] = col
line = 'readings,'
line += tags.map (tag)->
tag + '=' + o[tag]
.join(',')
line += ' '
line += fields.map (field)->
field + '=' + o[field]
.join(',')
line += ' ' + new Date(o.time).getTime() / 1000
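# A hedged sketch of one output line (field values hypothetical): measurement,
# comma-joined tags, a space, comma-joined fields, a space, epoch seconds:
#   readings,device_id=demo000000,battery_status=discharging,bssid=a0:b1,ssid=demo-net battery_level=96,battery_temperature=91.7,...,rssi=-50 1479211200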
# ------------ Streamed read/write
ws.write '''
# DDL
CREATE DATABASE test
CREATE RETENTION POLICY one_day ON test DURATION INF REPLICATION 1 SHARD DURATION 1d DEFAULT
# DML
# CONTEXT-DATABASE:test
# CONTEXT-RETENTION-POLICY:one_day
'''
buf = ''
cnt = 0
ts = new stream.Transform
transform: (chunk, encoding, callback)->
lines = chunk.split('\n')
lines[0] = buf + lines[0]
buf = lines.pop()
cnt += lines.length
callback(null, lines.map(map_line).join('\n') + '\n')
flush: (callback)->
if buf
callback(null, map_line(buf) + '\n') # the trailing partial line must be converted too
else
callback(null)
decodeStrings: false
await util.promisify(stream.pipeline)(rs, ts, ws)
stream.pipeline rs, ts, ws, (err)->
log err
# ------------ InfluxDB NodeJS API
Influx = require 'influx'
influx = new Influx.InfluxDB
host: 'localhost'
database: 'test'
schema: [
measurement: 'readings'
tags: [
'device_id'
'battery_status'
'bssid'
'ssid'
]
fields:
battery_level : Influx.FieldType.INTEGER
battery_temperature: Influx.FieldType.FLOAT
cpu_avg_1min : Influx.FieldType.FLOAT
cpu_avg_5min : Influx.FieldType.FLOAT
cpu_avg_15min : Influx.FieldType.FLOAT
mem_free : Influx.FieldType.INTEGER
mem_used : Influx.FieldType.INTEGER
rssi : Influx.FieldType.INTEGER
]
influx.createDatabase 'test' # create the database once the client is constructed
influx.getDatabaseNames()
influx.writePoints [
measurement: 'readings'
tags:
device_id: 'demo000000'
fields:
battery_level: 96
timestamp: new Date() # optional; influx uses the current server time when omitted
,
measurement: 'readings'
tags:
device_id: 'demo000001'
fields:
battery_level: 78
]
<|start_filename|>plugin/ColumnAvg/src/ColumnAvg.h<|end_filename|>
#include "CoreConcept.h"
extern "C" ConstantSP columnAvg(Heap *heap, vector<ConstantSP> &args);
extern "C" ConstantSP columnAvgMap(Heap *heap, vector<ConstantSP> &args);
extern "C" ConstantSP columnAvgFinal(const ConstantSP &result, const ConstantSP &placeholder);
<|start_filename|>script/testHaCluster.java<|end_filename|>
import com.xxdb.DBConnection;
import com.xxdb.data.BasicLong;
import com.xxdb.data.BasicTable;
import com.xxdb.data.Entity;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class testHaCluster { // class name must match the file name testHaCluster.java
public static DBConnection conn;
public static String[] sites = {"192.168.1.12:22217", "192.168.1.12:22218", "192.168.1.13:22217", "192.168.1.13:22218", "192.168.1.14:22217", "192.168.1.14:22218"};
public static void main(String[] args) throws IOException, InterruptedException {
conn = new DBConnection();
boolean b = conn.connect("192.168.1.12", 22217, "admin", "123456", "", true, sites); // host and port are separate arguments
String script = "login(`admin,`123456)\n" +
"if(existsDatabase(\"dfs://javaAPItest2\")){dropDatabase(\"dfs://javaAPItest2\")};\n" +
"db=database(\"dfs://javaAPItest2\",VALUE,2018.08.01..2018.08.10);\n" +
"t=table(2018.08.01..2018.08.10 as date ,1..10 as id);\n" +
"pt=db.createPartitionedTable(t,`pt,`date);\n" +
"retentionHour=3*24\n" +
"setRetentionPolicy(db,retentionHour,0);\n";
conn.run(script);
int i = 1;
BasicLong res;
while (b) {
try {
if (i < 1000) {
String s = "t = table(take(2018.08.01..2018.08.10 ," + i + ") as date ,take(1.." + i + "," + i + ") as id);" + "pt=loadTable(\"dfs://javaAPItest2\",`pt);pt.append!(t);exec count(*) from pt ";
res = (BasicLong) conn.run(s);
} else {
String s = "t = table(take(2018.08.01..2018.08.10 ,100) as date ,rand("+i+",100) as id);" + "pt=loadTable(\"dfs://javaAPItest2\",`pt);pt.append!(t);exec count(*) from pt ";
res = (BasicLong) conn.run(s);
}
System.out.println(i + "=" + res.getLong());
}catch(Exception ex){
ex.printStackTrace();
}
i++;
}
}
}
<|start_filename|>plugin/Msum/src/Msum.cpp<|end_filename|>
#include "CoreConcept.h"
#include "Util.h"
#include "Msum.h"
ConstantSP msum(const ConstantSP &X, const ConstantSP &window) {
string syntax = "Usage: msum::msum(X, window). ";
if (!X->isVector() || !X->isNumber())
throw IllegalArgumentException("msum::msum", syntax + "X must be a numeric vector.");
INDEX size = X->size();
int windowSize = window->getInt();
if (windowSize <= 1)
throw IllegalArgumentException("msum::msum", syntax + "window must be at least 2.");
ConstantSP result = Util::createVector(DT_DOUBLE, size);
double buf[Util::BUF_SIZE];
double windowHeadBuf[Util::BUF_SIZE];
double resultBuf[Util::BUF_SIZE];
double tmpSum = 0.0;
INDEX start = 0;
while (start < windowSize) {
int len = std::min(Util::BUF_SIZE, windowSize - start);
const double *p = X->getDoubleConst(start, len, buf);
double *r = result->getDoubleBuffer(start, len, resultBuf);
for (int i = 0; i < len; i++) {
if (p[i] != DBL_NMIN) // p[i] is not NULL
tmpSum += p[i];
r[i] = DBL_NMIN;
}
start += len;
}
result->setDouble(windowSize - 1, tmpSum); // the loop above wrote one NULL too many; overwrite it with the first window's sum
while (start < size) {
int bufSize = std::min(Util::BUF_SIZE - start % Util::BUF_SIZE, Util::BUF_SIZE);
int len = std::min(bufSize, size - start);
const double *p = X->getDoubleConst(start, len, buf);
const double *q = X->getDoubleConst(start - windowSize, len, windowHeadBuf);
double *r = result->getDoubleBuffer(start, len, resultBuf);
for (int i = 0; i < len; i++) {
if (p[i] != DBL_NMIN)
tmpSum += p[i];
if (q[i] != DBL_NMIN)
tmpSum -= q[i];
r[i] = tmpSum;
}
start += len;
}
return result;
}
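// A hedged usage sketch from the DolphinDB side (plugin path hypothetical):
//   loadPlugin("/path/to/PluginMsum.txt")
//   msum::msum(1..10, 3) // moving sum over a window of 3; the first 2 results are NULL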
<|start_filename|>plugin/ColumnAvg/src/ColumnAvg.cpp<|end_filename|>
#include "CoreConcept.h"
#include "Util.h"
#include "ScalarImp.h"
#include "ColumnAvg.h"
ConstantSP columnAvg(Heap *heap, vector<ConstantSP> &args) {
string syntax = "Usage: columnAvg::columnAvg(ds, colNames). ";
ConstantSP ds = args[0];
ConstantSP colNames = args[1];
if (!ds->isTuple())
throw IllegalArgumentException("columnAvg::columnAvg", syntax + "ds must be a tuple of data sources.");
if (colNames->getCategory() != LITERAL)
throw IllegalArgumentException("columnAvg::columnAvg", syntax + "colNames must be must be a string or a string vector.");
FunctionDefSP mapFunc = heap->currentSession()->getFunctionDef("columnAvg::columnAvgMap");
vector<ConstantSP> mapWithColNamesArgs = {new Void(), colNames};
FunctionDefSP mapWithColNames = Util::createPartialFunction(mapFunc, mapWithColNamesArgs); // columnAvgMap{, colNames}
FunctionDefSP reduceFunc = heap->currentSession()->getFunctionDef("add");
FunctionDefSP finalFunc = heap->currentSession()->getFunctionDef("columnAvg::columnAvgFinal");
FunctionDefSP mr = heap->currentSession()->getFunctionDef("mr"); // mr(ds, columnAvgMap{, colNames}, add, columnAvgFinal)
vector<ConstantSP> mrArgs = {ds, mapWithColNames, reduceFunc, finalFunc};
return mr->call(heap, mrArgs);
}
ConstantSP columnAvgMap(Heap *heap, vector<ConstantSP> &args) {
TableSP table = args[0];
ConstantSP colNames = args[1];
double sum = 0.0;
int count = 0;
for (int i = 0; i < colNames->size(); i++) {
string colName = colNames->getString(i);
VectorSP col = table->getColumn(colName);
sum += col->sum()->getDouble();
count += col->count();
}
ConstantSP result = Util::createVector(DT_ANY, 2);
result->set(0, new Double(sum));
result->set(1, new Int(count));
return result;
}
ConstantSP columnAvgFinal(const ConstantSP &result, const ConstantSP &placeholder) {
double sum = result->get(0)->getDouble();
int count = result->get(1)->getInt();
return new Double(sum / count);
}
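// A hedged usage sketch (database, table and column names hypothetical):
//   loadPlugin("/path/to/PluginColumnAvg.txt")
//   ds = sqlDS(<select * from loadTable("dfs://demo", "pt")>)
//   columnAvg::columnAvg(ds, `price`qty) // average over both columns across all data sources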
<|start_filename|>benchmark/DolphinDB vs TimescaleDB/make_symbol_enum.coffee<|end_filename|>
PG = require 'pg'
client = new PG.Client
connectionString: 'postgresql://postgres:postgres@192.168.1.201:5432/test'
await client.connect()
# Test the connection
await client.query 'select now()'
symbols = fs.readFileSync('D:/symbols.txt', 'UTF-8').split('\n')[...-1]
symbol_cmd = 'create type Symbol as enum ' + '(' + symbols.map((e)-> "'" + e + "'").join(',') + ');'
await client.query symbol_cmd
fs.writeFileSync 'D:/make_symbol_enum.sql', symbol_cmd
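# A hedged sample of the SQL this writes (ticker symbols hypothetical):
#   create type Symbol as enum ('AAPL','IBM','MSFT');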
<|start_filename|>plugin/Handler/src/Handler.cpp<|end_filename|>
#include "CoreConcept.h"
#include "Exceptions.h"
#include "Logger.h"
#include "ScalarImp.h"
#include "Handler.h"
ConstantSP handler(Heap *heap, vector<ConstantSP> &args) {
string syntax = "Usage: handler::handler(indices, table, msg). ";
ConstantSP indices = args[0];
TableSP table = args[1];
ConstantSP msg = args[2];
if (indices->getCategory() != INTEGRAL)
throw IllegalArgumentException("handler::handler", syntax + "indices must be integral.");
int msgSize = msg->size();
vector<ConstantSP> msgToAppend;
for (int i = 0; i < indices->size(); i++) {
int index = indices->getInt(i);
if (index < 0 || index >= msgSize)
throw RuntimeException("Index out of range.");
msgToAppend.push_back(msg->get(index));
}
INDEX insertedRows;
string errMsg;
bool success = table->append(msgToAppend, insertedRows, errMsg);
if (!success)
LOG_ERR("Failed to append to table: ", errMsg);
return new Void();
}
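// A hedged usage sketch (stream/target table names hypothetical): fix the column
// indices and target table via partial application, then use it as a subscriber:
//   loadPlugin("/path/to/PluginHandler.txt")
//   subscribeTable(tableName="trades", actionName="save", handler=handler::handler{[0, 2], t1})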
<|start_filename|>plugin/Msum/src/Msum.h<|end_filename|>
#include "CoreConcept.h"
extern "C" ConstantSP msum(const ConstantSP &X, const ConstantSP &window);
<|start_filename|>benchmark/DolphinDB vs InfluxDB/convert_big.coffee<|end_filename|>
fs = require 'fs'
stream = require 'stream'
util = require 'util'
# ------------ TAQ data definition
cols = ['symbol', 'date', 'time', 'bid', 'ofr', 'bidsiz', 'ofrsiz', 'mode', 'ex', 'mmid']
tags = ["symbol", "mode", "ex", "mmid"]
fields = ["bid", "ofr", "bidsiz", "ofrsiz"]
# ------------ all rows of data
# rs = fs.createReadStream 'D:/1/comp/influxdb/TAQ.csv',
# encoding: 'UTF-8'
# start: 'symbol,date,time,bid,ofr,bidsiz,ofrsiz,mode,ex,mmid\n'.length
# ws = fs.createWriteStream 'D:/TAQ.txt',
# encoding: 'UTF-8'
# ------------ CSV -> Line Protocol
map_line= (l)->
o = {}
for col, i in l.split(',')
o[cols[i]] = col
line = 'taq'
for tag in tags
if o[tag]
line += ',' + tag + '=' + o[tag]
line += ' '
line += fields.map (field)->
field + '=' + o[field]
.join(',')
line += ' ' + new Date(o.date[0...4] + '-' + o.date[4...6] + '-' + o.date[6..] + ' ' + o.time + '.' + Math.round(Math.random() * 1000)).getTime()
buf = ''
cnt = 0
main= ->
for i in [1, 2, 3, 6, 7]
name = 'TAQ2007080' + i
rs = fs.createReadStream '/data/TAQ/csv/' + name + '.csv',
encoding: 'UTF-8'
start: 'symbol,date,time,bid,ofr,bidsiz,ofrsiz,mode,ex,mmid\n'.length
ws = fs.createWriteStream '/data/TAQ/' + name + '.txt',
encoding: 'UTF-8'
# rs = fs.createReadStream 'D:/1/comp/influxdb/TAQ.csv',
# encoding: 'UTF-8'
# start: 'symbol,date,time,bid,ofr,bidsiz,ofrsiz,mode,ex,mmid\n'.length
# ws = fs.createWriteStream 'D:/TAQ.txt',
# encoding: 'UTF-8'
# ------------ Streamed read/write
ws.write '''
# DML
# CONTEXT-DATABASE:test2
# CONTEXT-RETENTION-POLICY:one_day
'''
buf = ''
cnt = 0
ts = new stream.Transform
transform: (chunk, encoding, callback)->
lines = chunk.split('\n')
lines[0] = buf + lines[0]
buf = lines.pop()
cnt += lines.length
callback(null, lines.map(map_line).join('\n') + '\n')
flush: (callback)->
if buf
callback(null, map_line(buf) + '\n')
else
callback(null)
decodeStrings: false
console.log '--- ' + i + ' ---'
await util.promisify(stream.pipeline)(rs, ts, ws)
main()
id = setInterval ->
console.log new Date(), cnt
, 10000
<|start_filename|>plugin/Handler/src/Handler.h<|end_filename|>
#include "CoreConcept.h"
extern "C" ConstantSP handler(Heap *heap, vector<ConstantSP> &args);
<|start_filename|>plugin/GeometricMean/src/GeometricMean.h<|end_filename|>
#include "CoreConcept.h"
extern "C" ConstantSP geometricMean(const ConstantSP &X, const ConstantSP &placeholder);
template <typename T>
inline bool isNull(T value);
template <typename T>
ConstantSP computeGeometricMean(ConstantSP x) {
if (((VectorSP) x)->isFastMode()) {
int size = x->size();
T *data = (T *)x->getDataArray();
double logSum = 0;
int count = 0; // count non-NULL elements so NULLs do not bias the mean
for (int i = 0; i < size; i++) {
if (!isNull(data[i])) {
logSum += std::log(data[i]);
count++;
}
}
double mean = count > 0 ? std::exp(logSum / count) : DBL_NMIN; // all-NULL input yields NULL
return new Double(mean);
}
else {
int size = x->size();
int segmentSize = x->getSegmentSize();
T **segments = (T **)x->getDataSegment();
INDEX start = 0;
int segmentId = 0;
double logSum = 0;
int count = 0; // count non-NULL elements so NULLs do not bias the mean
while (start < size) {
T *block = segments[segmentId];
int blockSize = std::min(segmentSize, size - start);
for (int i = 0; i < blockSize; i++) {
if (!isNull(block[i])) {
logSum += std::log(block[i]);
count++;
}
}
start += blockSize;
segmentId++;
}
double mean = count > 0 ? std::exp(logSum / count) : DBL_NMIN; // all-NULL input yields NULL
return new Double(mean);
}
}
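// Both branches rely on the log-domain identity
//   (x_1 * x_2 * ... * x_n)^(1/n) = exp((log x_1 + ... + log x_n) / n)
// which avoids the overflow a direct running product would hit on long vectors.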
template <>
inline bool isNull<char>(char value) {
return value == CHAR_MIN;
}
template <>
inline bool isNull<short>(short value) {
return value == SHRT_MIN;
}
template <>
inline bool isNull<int>(int value) {
return value == INT_MIN;
}
template <>
inline bool isNull<double>(double value) {
return value == DBL_NMIN;
}
template <>
inline bool isNull<float>(float value) {
return value == FLT_NMIN;
}
template <>
inline bool isNull<long long>(long long value) {
return value == LLONG_MIN;
} | dolphindb/tutorials_cn |
<|start_filename|>cmake-scripts/FindFlatbuffers.cmake<|end_filename|>
# Find flatbuffers
#
# FLATBUFFERS_FLATC - flatc.
# FLATBUFFERS_INCLUDE_DIR - where to find flatbuffers/*.
# FLATBUFFERS_LIBRARY - List of libraries when using flatbuffers.
# FLATBUFFERS_FOUND - True if flatbuffers found.
find_package(PkgConfig)
pkg_check_modules(FLATBUFFERS QUIET flatbuffers)
if(FLATBUFFERS_INCLUDE_DIR)
# Already in cache, be silent
set(FLATBUFFERS_FIND_QUIETLY TRUE)
endif()
find_program(FLATBUFFERS_FLATC flatc PATHS
${PROJECT_BINARY_DIR}/flatbuffers/flatbuffers/bin
${PROJECT_BINARY_DIR}-deps/flatbuffers/flatbuffers/bin)
find_path(FLATBUFFERS_INCLUDE_DIR flatbuffers/flatbuffers.h PATHS
${PROJECT_BINARY_DIR}/flatbuffers/flatbuffers/include
${PROJECT_BINARY_DIR}-deps/flatbuffers/flatbuffers/include)
find_library(FLATBUFFERS_LIBRARY flatbuffers PATHS
${PROJECT_BINARY_DIR}/flatbuffers/flatbuffers/lib
${PROJECT_BINARY_DIR}-deps/flatbuffers/flatbuffers/lib)
# handle the QUIETLY and REQUIRED arguments and set FLATBUFFERS_FOUND to TRUE
# if all listed variables are TRUE
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(FLATBUFFERS DEFAULT_MSG FLATBUFFERS_LIBRARY FLATBUFFERS_INCLUDE_DIR FLATBUFFERS_FLATC)
mark_as_advanced(FLATBUFFERS_LIBRARY FLATBUFFERS_INCLUDE_DIR FLATBUFFERS_FLATC)
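# A hedged usage sketch from a consuming CMakeLists.txt (target name hypothetical):
#   list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake-scripts")
#   find_package(Flatbuffers REQUIRED)
#   target_include_directories(mytarget PRIVATE ${FLATBUFFERS_INCLUDE_DIR})
#   target_link_libraries(mytarget ${FLATBUFFERS_LIBRARY})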
| Yeolar/flattype |
<|start_filename|>KYVedioPlayer/KYVedioPlayer/KYNetworkVideoCellPlayVC.h<|end_filename|>
//
// KYNetworkVideoCellPlayVC.h
// KYVedioPlayer
//
// Created by kingly on 16/9/9.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "KYBaseViewController.h" // superclass must be imported
@interface KYNetworkVideoCellPlayVC : KYBaseViewController
@end
<|start_filename|>KYVedioPlayer/KYVedioPlayer/KYNetworkVideoCell.h<|end_filename|>
//
// KYNetworkVideoCell.h
// KYVedioPlayer
//
// Created by kingly on 16/9/12.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "KYVideo.h"
@protocol KYNetworkVideoCellDelegate;
@interface KYNetworkVideoCell : UITableViewCell
@property (nonatomic,weak) id<KYNetworkVideoCellDelegate>mydelegate;
+(NSString *) cellReuseIdentifier;
@property (nonatomic, strong) UIImageView *vedioBg;
@property (nonatomic, strong) UIButton *playBtn;
@property (nonatomic, strong) NSIndexPath *indexPath;
@property (nonatomic,strong) KYVideo *video;
@end
@protocol KYNetworkVideoCellDelegate <NSObject>
-(void)networkVideoCellVedioBgTapGesture:(KYVideo *)video;
-(void)networkVideoCellOnClickVideoPlay:(KYVideo *)video withVideoPlayBtn:(UIButton *)videoPlayBtn;
@end
<|start_filename|>KYVedioPlayer/KYVedioPlayer/KYSwitchFreelyVC.h<|end_filename|>
//
// KYSwitchFreelyVC.h
// KYVedioPlayer
//
// Created by kingly on 16/9/9.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import "KYBaseViewController.h"
@interface KYSwitchFreelyVC : KYBaseViewController
@end
<|start_filename|>KYVedioPlayer/KYVedioPlayer/ViewController.h<|end_filename|>
//
// ViewController.h
// KYVedioPlayer
//
// Created by kingly on 16/9/8.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "KYBaseViewController.h" // superclass must be imported
@interface ViewController : KYBaseViewController
@end
<|start_filename|>KYVedioPlayer/Pods/Target Support Files/Pods-KYVedioPlayer/Pods-KYVedioPlayer-umbrella.h<|end_filename|>
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
FOUNDATION_EXPORT double Pods_KYVedioPlayerVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_KYVedioPlayerVersionString[];
<|start_filename|>KYVedioPlayer/KYVedioPlayer/BaseNavigationController/KYBaseViewController.h<|end_filename|>
//
// KYBaseViewController.h
// KYVedioPlayer
//
// Created by kingly on 16/9/8.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "MBProgressHUD.h"
typedef void(^onSuccess)(NSArray *videoArray);
typedef void(^onFailed)(NSError *error);
@interface KYBaseViewController : UIViewController
@property (nonatomic,retain) MBProgressHUD* progressHUD;
- (void)addProgressHUD;
- (void)addProgressHUDWithMessage:(NSString*)message;
- (void)removeProgressHUD;
- (void)getVideoListWithURLString:(NSString *)URLString success:(onSuccess)success failed:(onFailed)failed;
@end
<|start_filename|>KYVedioPlayer/KYVedioPlayer/KYLocalVideoPlayVC.h<|end_filename|>
//
// KYLocalVideoPlayVC.h
// KYVedioPlayer
//
// Created by kingly on 16/9/9.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import "KYBaseViewController.h"
@interface KYLocalVideoPlayVC : KYBaseViewController
@property (nonatomic, retain) NSString * URLString;
@end
<|start_filename|>KYVedioPlayer/KYVedioPlayer/KYVideo.h<|end_filename|>
//
// KYVideo.h
// KYVedioPlayer
//
// Created by kingly on 16/9/10.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface KYVideo : NSObject
@property (nonatomic, strong) NSString * title;
@property (nonatomic, strong) NSString * image;
@property (nonatomic, strong) NSString * video;
/**
* Height of the custom cell
*/
@property (nonatomic,assign) CGFloat curCellHeight;
@property (nonatomic, strong) NSIndexPath *indexPath;
@end
<|start_filename|>KYVedioPlayer/KYVedioPlayer/KYRememberLastPlayedVC.h<|end_filename|>
//
// KYRememberLastPlayedVC.h
// KYVedioPlayer
//
// Created by kingly on 16/9/9.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import "KYBaseViewController.h"
@interface KYRememberLastPlayedVC : KYBaseViewController
@end
<|start_filename|>KYVedioPlayerLib/KYVedioPlayer.h<|end_filename|>
//
// KYVedioPlayer.h
// KYVedioPlayer
//
// Created by kingly on 16/9/9.
// Copyright © 2016 https://github.com/kingly09/KYVedioPlayer kingly inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#if __has_include(<Masonry/Masonry.h>)
#import <Masonry/Masonry.h>
#else
#import "Masonry.h"
#endif
NS_ASSUME_NONNULL_BEGIN
@import MediaPlayer;
@import AVFoundation;
@import UIKit;
// Player states
typedef NS_ENUM(NSInteger, KYVedioPlayerState) {
KYVedioPlayerStateFailed, // playback failed
KYVedioPlayerStateBuffering, // buffering
KYVedioPlayerStatusReadyToPlay, // ready to play
KYVedioPlayerStatePlaying, // playing
KYVedioPlayerStateStopped, // paused
KYVedioPlayerStateFinished // finished playing
};
// Type of the close button in the player's top-left corner
typedef NS_ENUM(NSInteger, CloseBtnStyle){
CloseBtnStylePop, // back arrow (<-)
CloseBtnStyleClose // close (X)
};
@class KYVedioPlayer;
@protocol KYVedioPlayerDelegate <NSObject>
@optional
/// Player events
// Delegate method for tapping the play/pause button
-(void)kyvedioPlayer:(KYVedioPlayer *)kyvedioPlayer clickedPlayOrPauseButton:(UIButton *)playOrPauseBtn;
// Delegate method for tapping the close button
-(void)kyvedioPlayer:(KYVedioPlayer *)kyvedioPlayer clickedCloseButton:(UIButton *)closeBtn;
// Delegate method for tapping the share button
-(void)kyvedioPlayer:(KYVedioPlayer *)kyvedioPlayer onClickShareBtn:(UIButton *)shareBtn;
// Delegate method for tapping the full-screen button
-(void)kyvedioPlayer:(KYVedioPlayer *)kyvedioPlayer clickedFullScreenButton:(UIButton *)fullScreenBtn;
// Delegate method for a single tap on the KYVedioPlayer
-(void)kyvedioPlayer:(KYVedioPlayer *)kyvedioPlayer singleTaped:(UITapGestureRecognizer *)singleTap;
// Delegate method for a double tap on the KYVedioPlayer
-(void)kyvedioPlayer:(KYVedioPlayer *)kyvedioPlayer doubleTaped:(UITapGestureRecognizer *)doubleTap;
/// Playback state
// Delegate method for playback failure
-(void)kyvedioPlayerFailedPlay:(KYVedioPlayer *)kyvedioPlayer playerStatus:(KYVedioPlayerState)state;
// Delegate method for ready-to-play
-(void)kyvedioPlayerReadyToPlay:(KYVedioPlayer *)kyvedioPlayer playerStatus:(KYVedioPlayerState)state;
// Delegate method for playback finished
-(void)kyplayerFinishedPlay:(KYVedioPlayer *)kyvedioPlayer;
@end
@interface KYVedioPlayer : UIView
/**
 * The underlying AVPlayer
 */
@property (nonatomic,retain,nullable) AVPlayer *player;
/**
 * The playerLayer; its frame can be modified
 */
@property (nonatomic,retain,nullable) AVPlayerLayer *playerLayer;
/** The player's delegate */
@property (nonatomic, weak)id <KYVedioPlayerDelegate> delegate;
/**
 * Bottom control toolbar
 */
@property (nonatomic,retain ) UIView *bottomView;
/**
 * Top control toolbar
 */
@property (nonatomic,retain ) UIView *topView;
/**
 * Label showing the title of the video being played
 */
@property (nonatomic,strong) UILabel *titleLabel;
/**
 * Player state
 */
@property (nonatomic, assign) KYVedioPlayerState state;
/**
 * Type of the button in the player's top-left corner
 */
@property (nonatomic, assign) CloseBtnStyle closeBtnStyle;
/**
 * Timer that auto-dismisses the controls
 */
@property (nonatomic, retain,nullable) NSTimer *autoDismissTimer;
/**
 * Whether the bottom view hides automatically; defaults to YES
 */
@property (nonatomic,assign ) BOOL isAutoDismissBottomView;
/**
 * Whether the player is currently in full-screen mode
 */
@property (nonatomic,assign ) BOOL isFullscreen;
/**
 * Button that toggles full screen
 */
@property (nonatomic,retain ) UIButton *fullScreenBtn;
/**
 * Play/pause button
 */
@property (nonatomic,retain,nullable) UIButton *playOrPauseBtn;
/**
 * Close button in the top-left corner
 */
@property (nonatomic,retain ) UIButton *closeBtn;
/**
 * Share button in the top-right corner
 */
@property (nonatomic,retain ) UIButton *shareBtn;
/**
 * UILabel shown when loading fails
 */
@property (nonatomic,strong) UILabel *loadFailedLabel;
/**
 * The AVPlayerItem currently playing
 */
@property (nonatomic, retain,nullable) AVPlayerItem *currentItem;
/**
 * Loading spinner
 */
@property (nonatomic,strong) UIActivityIndicatorView *loadingView;
/**
 * Whether the player is currently playing
 */
@property (nonatomic,assign ) BOOL isPlaying;
/**
 * URLString of the video to play; may be a local file path or an HTTP URL
 */
@property (nonatomic,copy) NSString *URLString;
/**
 * Seek to the given time and play from there
 * @param seekTime the time point to seek to
 */
@property (nonatomic, assign) double seekTime;
/**
 * Color of the progress bar
 */
@property (nonatomic,strong) UIColor *progressColor;
/**
 * Play
 */
- (void)play;
/**
 * Pause
 */
- (void)pause;
/**
 * Get the current playback time
 *
 * @return the current time as a double
 */
- (double)currentTime;
/**
 * Reset the player
 */
- (void)resetKYVedioPlayer;
/**
 * Play full screen
 * @param interfaceOrientation the target orientation
 * @param player the current player
 * @param fatherView the current parent view
 **/
-(void)showFullScreenWithInterfaceOrientation:(UIInterfaceOrientation )interfaceOrientation player:(KYVedioPlayer *)player withFatherView:(UIView *)fatherView;
/**
 * Play in a small (inline) frame
 * @param player the current player
 * @param fatherView the current parent view
 * @param playerFrame the small-screen frame
 **/
-(void)showSmallScreenWithPlayer:(KYVedioPlayer *)player withFatherView:(UIView *)fatherView withFrame:(CGRect )playerFrame;
@end
NS_ASSUME_NONNULL_END
| kingly09/KYVedioPlayer |
<|start_filename|>models/FAC/kernelconv2d/KernelConv2D_kernel.h<|end_filename|>
#include <ATen/ATen.h>
#include <cuda_runtime.h>
#ifdef __cplusplus
extern "C" {
#endif
int KernelConv2D_forward_cuda_kernel(
at::Tensor& input,
at::Tensor& kernel,
int kernel_size,
at::Tensor& output,
cudaStream_t stream
);
int KernelConv2D_backward_cuda_kernel(
at::Tensor& input,
at::Tensor& kernel,
int kernel_size,
at::Tensor& grad_output,
at::Tensor& grad_input,
at::Tensor& grad_kernel,
cudaStream_t stream
);
#ifdef __cplusplus
}
#endif
| ShuaiWangUESTC/CSSTN |
<|start_filename|>src/android/com/sofienvppp2/VideoPicturePreviewPicker/VideoPicturePreviewPickerV2.java<|end_filename|>
/**
* An image/video picker plugin with preview for Cordova/PhoneGap.
*/
package com.sofienvppp2;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import android.content.pm.PackageManager;
import android.os.Build;
import android.app.Activity;
import android.content.Intent;
import android.content.Context;
public class VideoPicturePreviewPickerV2 extends CordovaPlugin {
private CallbackContext callbackContext;
private JSONObject params;
boolean Is_multiSelect = false;
boolean picture_selector = false;
boolean video_selector = false;
boolean display_video_time = false;
boolean display_preview = false;
int limit_Select = 5;
public boolean execute(String action, final JSONArray args, final CallbackContext callbackContext) throws JSONException {
this.callbackContext = callbackContext;
this.params = args.getJSONObject(0);
if (this.params.has("limit_Select"))
limit_Select = this.params.getInt("limit_Select");
if (this.params.has("Is_multiSelect"))
Is_multiSelect = this.params.getBoolean("Is_multiSelect");
if (this.params.has("picture_selector"))
picture_selector = this.params.getBoolean("picture_selector");
if (this.params.has("video_selector"))
video_selector = this.params.getBoolean("video_selector");
if (this.params.has("display_video_time"))
display_video_time = this.params.getBoolean("display_video_time");
if (this.params.has("display_preview"))
display_preview = this.params.getBoolean("display_preview");
Context context = this.cordova.getActivity().getApplicationContext();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
int read = context
.checkCallingOrSelfPermission(android.Manifest.permission.READ_EXTERNAL_STORAGE);
int write = context
.checkCallingOrSelfPermission(android.Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (read != PackageManager.PERMISSION_GRANTED && write != PackageManager.PERMISSION_GRANTED) {
Intent intent = new Intent(cordova.getActivity(), GettingPermissionsActivity.class);
if (this.cordova != null)
this.cordova.startActivityForResult((CordovaPlugin) this, intent, 0);
else
context.startActivity(intent); // fall back when no Cordova interface is available; calling both would launch the activity twice
} else {
if (action.equals("openPicker")) {
Intent intent = new Intent(cordova.getActivity(), VideoPicturePickerActivity.class);
intent.putExtra("Is_multiSelect", Is_multiSelect);
intent.putExtra("limit_Select", limit_Select);
intent.putExtra("picture_selector", picture_selector);
intent.putExtra("video_selector", video_selector);
intent.putExtra("display_video_time", display_video_time);
intent.putExtra("display_preview", display_preview);
if (this.cordova != null) {
this.cordova.startActivityForResult((CordovaPlugin) this, intent, 0);
}
}
}
} else {
if (action.equals("openPicker")) {
Intent intent = new Intent(cordova.getActivity(), VideoPicturePickerActivity.class);
intent.putExtra("Is_multiSelect", Is_multiSelect);
intent.putExtra("limit_Select", limit_Select);
intent.putExtra("picture_selector", picture_selector);
intent.putExtra("video_selector", video_selector);
intent.putExtra("display_video_time", display_video_time);
intent.putExtra("display_preview", display_preview);
if (this.cordova != null) {
this.cordova.startActivityForResult((CordovaPlugin) this, intent, 0);
}
}
}
return true;
}
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == Activity.RESULT_OK && data != null) {
String action ="";
if(data.hasExtra("PermissionAction"))
action = data.getStringExtra("PermissionAction");
if(action.equals("PermissionOK") || action.equals("PermissionNOTOK") )
{
if(action.equals("PermissionOK") )
{
Intent intent = new Intent(cordova.getActivity(), VideoPicturePickerActivity.class);
intent.putExtra("Is_multiSelect", Is_multiSelect);
intent.putExtra("limit_Select", limit_Select);
intent.putExtra("picture_selector", picture_selector);
intent.putExtra("video_selector", video_selector);
intent.putExtra("display_video_time", display_video_time);
intent.putExtra("display_preview", display_preview);
if (this.cordova != null)
{
this.cordova.startActivityForResult((CordovaPlugin) this, intent, 0);
}
}
if(action.equals("PermissionNOTOK") )
{
this.callbackContext.error("no permission is given !");
}
}
else
{
ArrayList<String> type = data.getStringArrayListExtra("type");
ArrayList<String> path = data.getStringArrayListExtra("path");
JSONArray jArray = new JSONArray();
JSONObject jResult = new JSONObject();
for (int i = 0; i < type.size(); i++) {
JSONObject jGroup = new JSONObject();
try {
jGroup.put("type", type.get(i));
jGroup.put("path", path.get(i));
jArray.put(jGroup);
// /itemDetail Name is JsonArray Name
jResult.put("selectedMedia", jArray);
} catch (JSONException e) {
e.printStackTrace();
}
}
this.callbackContext.success(jResult);
}
}
else {
this.callbackContext.error("No images selected");
}
}
}
| newsof1111/cordova-plugin-video-picture-preview-picker-V2 |
<|start_filename|>zalando/zalando_test.go<|end_filename|>
package zalando
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http/httptest"
"os"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/gin-gonic/gin"
"github.com/zalando/gin-oauth2"
"golang.org/x/oauth2"
)
// You must have a file $HOME/.chimp-token that contains only a
// valid Zalando access token.
var tokenFile string = fmt.Sprintf("%s/.chimp-token", os.Getenv("HOME"))
func getToken() (string, error) {
file, err := os.Open(tokenFile)
if err != nil {
return "not a file", err
}
defer file.Close()
data, err := ioutil.ReadAll(file)
if err != nil {
return "reading failed", err
}
return string(data), nil
}
func TestRequestTeamInfo(t *testing.T) {
ginoauth2.AuthInfoURL = OAuth2Endpoint.TokenURL
accessToken, err := getToken()
if err != nil {
fmt.Printf("ERR: Could not get Access Token from file, caused by %q.", accessToken)
t.FailNow()
}
token := oauth2.Token{
AccessToken: accessToken,
TokenType: "Bearer",
Expiry: time.Now().Add(time.Duration(600) * time.Second),
}
tc, err := ginoauth2.GetTokenContainer(&token)
if err != nil {
fmt.Printf("ERR: Could not get TokenContainer from ginoauth2.")
t.FailNow()
}
resp, err := RequestTeamInfo(tc, TeamAPI)
if err != nil {
fmt.Printf("ERR: Could not get TeamInfo for TokenContainer from TeamAPI.")
t.FailNow()
}
var data []TeamInfo
err = json.Unmarshal(resp, &data)
if err != nil {
fmt.Printf("ERR: Could not unmarshal json data.")
t.FailNow()
}
fmt.Printf("%+v\n", data)
}
func TestScopeCheck(t *testing.T) {
// given
tc := &ginoauth2.TokenContainer{
Token: &oauth2.Token{
AccessToken: "sd<PASSWORD>.",
TokenType: "Bearer",
RefreshToken: "",
},
Scopes: map[string]interface{}{
"my-scope-1": true,
"my-scope-2": true,
"uid": "stups_marilyn-updater",
},
GrantType: "password",
Realm: "/services",
}
ctx, _ := gin.CreateTestContext(httptest.NewRecorder())
// when
checkFn := ScopeCheck("name", "my-scope-1")
result := checkFn(tc, ctx)
// then
assert.True(t, result)
scopeVal, scopeOk := ctx.Get("my-scope-1")
assert.True(t, scopeOk)
assert.Equal(t, true, scopeVal)
uid, uidOk := ctx.Get("uid")
assert.True(t, uidOk)
assert.Equal(t, "stups_marilyn-updater", uid)
}
func TestScopeAndCheck(t *testing.T) {
// given
tc := &ginoauth2.TokenContainer{
Token: &oauth2.Token{
AccessToken: "sd<PASSWORD>.",
TokenType: "Bearer",
RefreshToken: "",
},
Scopes: map[string]interface{}{
"my-scope-1": true,
"my-scope-2": true,
"uid": "stups_marilyn-updater",
},
GrantType: "password",
Realm: "/services",
}
ctx, _ := gin.CreateTestContext(httptest.NewRecorder())
// when
checkFn := ScopeAndCheck("name", "uid", "my-scope-2")
result := checkFn(tc, ctx)
// then
assert.True(t, result)
uidVal, uidOk := ctx.Get("uid")
scopeVal, scopeOk := ctx.Get("my-scope-2")
assert.True(t, uidOk)
assert.Equal(t, "stups_marilyn-updater", uidVal)
assert.True(t, scopeOk)
assert.Equal(t, true, scopeVal)
}
<|start_filename|>example/zalando/main.go<|end_filename|>
// Zalando specific example.
package main
import (
"flag"
"fmt"
"time"
"github.com/gin-gonic/gin"
"github.com/golang/glog"
"github.com/szuecs/gin-glog"
"github.com/zalando/gin-oauth2"
"github.com/zalando/gin-oauth2/zalando"
)
var USERS []zalando.AccessTuple = []zalando.AccessTuple{
{"/employees", "sszuecs", "<NAME>"},
{"/employees", "njuettner", "<NAME>"},
}
var TEAMS []zalando.AccessTuple = []zalando.AccessTuple{
{"teams", "opensourceguild", "OpenSource"},
{"teams", "tm", "Platform Engineering / System"},
{"teams", "teapot", "Platform / Cloud API"},
}
var SERVICES []zalando.AccessTuple = []zalando.AccessTuple{
{"services", "foo", "Fooservice"},
}
func main() {
flag.Parse()
router := gin.New()
router.Use(ginglog.Logger(3 * time.Second))
router.Use(ginoauth2.RequestLogger([]string{"uid"}, "data"))
router.Use(gin.Recovery())
ginoauth2.VarianceTimer = 300 * time.Millisecond // defaults to 30s
public := router.Group("/api")
public.GET("/", func(c *gin.Context) {
c.JSON(200, gin.H{"message": "Hello to public world"})
})
private := router.Group("/api/private")
privateGroup := router.Group("/api/privateGroup")
privateUser := router.Group("/api/privateUser")
privateService := router.Group("/api/privateService")
glog.Infof("Register allowed users: %+v and groups: %+v and services: %+v", USERS, TEAMS, SERVICES)
private.Use(ginoauth2.AuthChain(zalando.OAuth2Endpoint, zalando.UidCheck(USERS), zalando.GroupCheck(TEAMS), zalando.UidCheck(SERVICES)))
privateGroup.Use(ginoauth2.Auth(zalando.GroupCheck(TEAMS), zalando.OAuth2Endpoint))
privateUser.Use(ginoauth2.Auth(zalando.UidCheck(USERS), zalando.OAuth2Endpoint))
//privateService.Use(ginoauth2.Auth(zalando.UidCheck(SERVICES), zalando.OAuth2Endpoint))
privateService.Use(ginoauth2.Auth(zalando.ScopeAndCheck("uidcheck", "uid", "bar"), zalando.OAuth2Endpoint))
private.GET("/", func(c *gin.Context) {
c.JSON(200, gin.H{"message": "Hello from private for groups and users"})
})
privateGroup.GET("/", func(c *gin.Context) {
uid, okUid := c.Get("uid")
if team, ok := c.Get("team"); ok && okUid {
c.JSON(200, gin.H{"message": fmt.Sprintf("Hello from private for groups to %s member of %s", uid, team)})
} else {
c.JSON(200, gin.H{"message": "Hello from private for groups without uid and team"})
}
})
privateUser.GET("/", func(c *gin.Context) {
if v, ok := c.Get("cn"); ok {
c.JSON(200, gin.H{"message": fmt.Sprintf("Hello from private for users to %s", v)})
} else {
c.JSON(200, gin.H{"message": "Hello from private for users without cn"})
}
})
privateService.GET("/", func(c *gin.Context) {
if v, ok := c.Get("cn"); ok {
c.JSON(200, gin.H{"message": fmt.Sprintf("Hello from private for services to %s", v)})
} else {
c.JSON(200, gin.H{"message": "Hello from private for services without cn"})
}
})
glog.Info("bootstrapped application")
router.Run(":8081")
}
<|start_filename|>github/github.go<|end_filename|>
// Package github provides you access to Github's OAuth2
// infrastructure.
package github
import (
"crypto/rand"
"encoding/base64"
"encoding/gob"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"github.com/gin-gonic/contrib/sessions"
"github.com/gin-gonic/gin"
"github.com/golang/glog"
"github.com/google/go-github/github"
"golang.org/x/oauth2"
oauth2gh "golang.org/x/oauth2/github"
)
// Credentials stores google client-ids.
type Credentials struct {
ClientID string `json:"clientid"`
ClientSecret string `json:"secret"`
}
var (
conf *oauth2.Config
cred Credentials
state string
store sessions.CookieStore
)
func randToken() string {
b := make([]byte, 32)
if _, err := rand.Read(b); err != nil {
glog.Fatalf("[Gin-OAuth] Failed to read rand: %v\n", err)
}
return base64.StdEncoding.EncodeToString(b)
}
func Setup(redirectURL, credFile string, scopes []string, secret []byte) {
store = sessions.NewCookieStore(secret)
var c Credentials
file, err := ioutil.ReadFile(credFile)
if err != nil {
glog.Fatalf("[Gin-OAuth] File error: %v\n", err)
}
err = json.Unmarshal(file, &c)
if err != nil {
glog.Fatalf("[Gin-OAuth] Failed to unmarshal client credentials: %v\n", err)
}
conf = &oauth2.Config{
ClientID: c.ClientID,
ClientSecret: c.ClientSecret,
RedirectURL: redirectURL,
Scopes: scopes,
Endpoint: oauth2gh.Endpoint,
}
}
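// A hedged example of the credentials file Setup reads (values hypothetical),
// matching the json tags on the Credentials struct above:
//   {
//     "clientid": "my-github-client-id",
//     "secret": "my-github-client-secret"
//   }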
func Session(name string) gin.HandlerFunc {
return sessions.Sessions(name, store)
}
func LoginHandler(ctx *gin.Context) {
state = randToken()
session := sessions.Default(ctx)
session.Set("state", state)
session.Save()
ctx.Writer.Write([]byte("<html><title>Golang Github</title> <body> <a href='" + GetLoginURL(state) + "'><button>Login with GitHub!</button> </a> </body></html>"))
}
func GetLoginURL(state string) string {
return conf.AuthCodeURL(state)
}
type AuthUser struct {
Login string `json:"login"`
Name string `json:"name"`
Email string `json:"email"`
Company string `json:"company"`
URL string `json:"url"`
}
func init() {
gob.Register(AuthUser{})
}
func Auth() gin.HandlerFunc {
return func(ctx *gin.Context) {
var (
ok bool
authUser AuthUser
user *github.User
)
// Handle the exchange code to initiate a transport.
session := sessions.Default(ctx)
mysession := session.Get("ginoauthgh")
if authUser, ok = mysession.(AuthUser); ok {
ctx.Set("user", authUser)
ctx.Next()
return
}
retrievedState := session.Get("state")
if retrievedState != ctx.Query("state") {
ctx.AbortWithError(http.StatusUnauthorized, fmt.Errorf("Invalid session state: %s", retrievedState))
return
}
// TODO: oauth2.NoContext -> context.Context from stdlib
tok, err := conf.Exchange(oauth2.NoContext, ctx.Query("code"))
if err != nil {
ctx.AbortWithError(http.StatusBadRequest, fmt.Errorf("Failed to do exchange: %v", err))
return
}
client := github.NewClient(conf.Client(oauth2.NoContext, tok))
user, _, err = client.Users.Get(oauth2.NoContext, "")
if err != nil {
ctx.AbortWithError(http.StatusBadRequest, fmt.Errorf("Failed to get user: %v", err))
return
}
// save userinfo, which could be used in Handlers
authUser = AuthUser{
Login: *user.Login,
Name: *user.Name,
URL: *user.URL,
}
ctx.Set("user", authUser)
// populate cookie
session.Set("ginoauthgh", authUser)
if err := session.Save(); err != nil {
glog.Errorf("Failed to save session: %v", err)
}
}
}
| blackironj/gin-oauth2 |
<|start_filename|>wifi.h<|end_filename|>
char mqtt_server[80] = "example.tld";
char username[24] = "";
char password[24] = "";
| dl1mx/esp8266-geigercounter |
<|start_filename|>addon.json<|end_filename|>
{
"package-name": "djangocms-disqus",
"installed-apps": [
"connected_accounts",
"connected_accounts.providers",
"djangocms_disqus"
]
}
| mishbahr/djangocms-disqus |
<|start_filename|>Dockerfile<|end_filename|>
FROM golang:alpine as builder
RUN apk update && apk upgrade && apk add --no-cache git
ADD ./ ./gomatrix
RUN mkdir /build
WORKDIR ./gomatrix
ENV GOOS=linux GOARCH=amd64 CGO_ENABLED=0
RUN go build -a -ldflags="-w -s" -installsuffix cgo -o /build/gomatrix .
FROM scratch
LABEL maintainer="<NAME> <<EMAIL>>"
COPY --from=builder /build/gomatrix /gomatrix
ENTRYPOINT ["/gomatrix"]
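# A hedged usage sketch (image tag hypothetical):
#   docker build -t gomatrix .
#   docker run --rm gomatrix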
| ryanmarshallott/gomatrix |
<|start_filename|>test/fixtures/import-with-comment/expected.7.es2015.js<|end_filename|>
Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require('my-module'));
});
Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require('my-module'));
});
<|start_filename|>test/fixtures/non-string-argument/expected.6.js<|end_filename|>
Promise.resolve(`${{ 'answer': 42 }}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
Promise.resolve(`${['foo', 'bar']}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
Promise.resolve(`${42}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
Promise.resolve(`${void 0}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
Promise.resolve(`${undefined}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
Promise.resolve(`${null}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
Promise.resolve(`${true}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
Promise.resolve(`${Symbol()}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
<|start_filename|>test/testPlugin.js<|end_filename|>
import * as babel6 from 'babel-core';
// This can't be imported in node 4.
// eslint-disable-next-line global-require
const babel7lazy = () => require('@babel/core');
export default function testPlugin(version, code, presets, plugins, options = {}) {
const transform = version === 6 ? babel6.transform : babel7lazy().transformSync;
const helpers = version === 6 ? 'external-helpers' : '@babel/external-helpers';
const result = transform(code, {
presets: [].concat(presets || []),
plugins: [].concat(plugins || [], [['./src/index.js', options], helpers]),
});
return result.code;
}
<|start_filename|>test/index.js<|end_filename|>
import test from 'tape';
import { join } from 'path';
import {
readdirSync, statSync, readFileSync, writeFileSync,
} from 'fs';
import testPlugin from './testPlugin';
const FIXTURE_PATH = join(__dirname, 'fixtures');
const testFolders = readdirSync(FIXTURE_PATH).filter((file) => (
statSync(join(FIXTURE_PATH, file)).isDirectory()
));
// Babel 7 only supports node 6+
const versions = Number(process.version.match(/\d+/)[0]) >= 6 ? [6, 7] : [6];
const pkgs = {
6: {
env: 'env',
es2015: 'es2015',
templates: 'transform-es2015-template-literals',
},
7: {
env: '@babel/env',
templates: '@babel/transform-template-literals',
},
};
function normalize(output) {
return `${output.trim()}\n`;
}
function tryRead(folder, file) {
try {
return readFileSync(join(FIXTURE_PATH, folder, file), 'utf8');
} catch (e) {
return '';
}
}
function assertOrWrite(st, result, expected, folder, file) {
if (process.env.OVERWRITE) {
writeFileSync(join(FIXTURE_PATH, folder, file), normalize(result));
} else {
st.equal(normalize(result), normalize(expected));
}
}
test('babel-plugin-dynamic-import-node', (t) => {
testFolders.forEach((folderName) => {
const actual = tryRead(folderName, 'actual.js');
versions.forEach((version) => {
const expected = tryRead(folderName, `expected.${version}.js`);
const expectedES2015 = tryRead(folderName, `expected.${version}.es2015.js`);
const expectedNoInterop = tryRead(folderName, `expected.${version}.noInterop.js`);
t.test(`babel ${version} - works with ${folderName}`, (st) => {
const result = testPlugin(version, actual);
assertOrWrite(st, result, expected, folderName, `expected.${version}.js`);
st.end();
});
t.test(`babel ${version} - works with ${folderName} and the 'noInterop': true option`, (st) => {
const result = testPlugin(version, actual, [], [], { noInterop: true });
assertOrWrite(st, result, expectedNoInterop, folderName, `expected.${version}.noInterop.js`);
st.end();
});
t.test(`babel ${version} - works with ${folderName} and the env preset`, (st) => {
const result = testPlugin(
version,
actual,
// Disable modules, otherwise it includes a different version of this plugin
[[pkgs[version].env, { modules: false }]],
[[pkgs[version].templates, { spec: true }]],
);
assertOrWrite(st, result, expectedES2015, folderName, `expected.${version}.es2015.js`);
st.end();
});
if (version === 6 && !process.env.OVERWRITE
// The es2015 and env presets have two different output with async functions
&& folderName !== 'dynamic-argument') {
t.test(`babel ${version} - works with ${folderName} and the es2015 preset`, (st) => {
const result = testPlugin(
version,
actual,
[[pkgs[version].es2015, { modules: false }]],
[[pkgs[version].templates, { spec: true }]],
);
st.equal(normalize(result), normalize(expectedES2015));
st.end();
});
}
});
});
t.end();
});
<|start_filename|>test/fixtures/dynamic-argument/actual.js<|end_filename|>
import(MODULE);
let i = 0;
import(i++);
import(fn());
async () => import(await "x");
function* f() { import(yield "x"); }
<|start_filename|>test/fixtures/basic-import/expected.7.noInterop.js<|end_filename|>
const testModule = Promise.resolve().then(() => require('test-module'));
<|start_filename|>test/fixtures/non-string-argument/actual.js<|end_filename|>
import({ 'answer': 42 });
import(['foo', 'bar']);
import(42);
import(void 0);
import(undefined);
import(null);
import(true);
import(Symbol());
<|start_filename|>test/fixtures/template-argument/actual.js<|end_filename|>
import(`1`);
import(tag`2`);
import(`3-${MODULE}`);
<|start_filename|>test/fixtures/basic-import/expected.7.es2015.js<|end_filename|>
var testModule = Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require('test-module'));
});
<|start_filename|>test/fixtures/dynamic-argument/expected.6.es2015.js<|end_filename|>
var _this = this;
var _marked = /*#__PURE__*/regeneratorRuntime.mark(f);
Promise.resolve("" + String(MODULE)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
var i = 0;
Promise.resolve("" + i++).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("" + String(fn())).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
babelHelpers.asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee() {
return regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.t0 = Promise;
_context.t1 = String;
_context.next = 4;
return "x";
case 4:
_context.t2 = _context.sent;
_context.t3 = (0, _context.t1)(_context.t2);
_context.t4 = "" + _context.t3;
_context.t5 = function (s) {
return babelHelpers.interopRequireWildcard(require(s));
};
return _context.abrupt("return", _context.t0.resolve.call(_context.t0, _context.t4).then(_context.t5));
case 9:
case "end":
return _context.stop();
}
}
}, _callee, _this);
}));
function f() {
return regeneratorRuntime.wrap(function f$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
_context2.t0 = Promise;
_context2.t1 = String;
_context2.next = 4;
return "x";
case 4:
_context2.t2 = _context2.sent;
_context2.t3 = (0, _context2.t1)(_context2.t2);
_context2.t4 = "" + _context2.t3;
_context2.t5 = function (s) {
return babelHelpers.interopRequireWildcard(require(s));
};
_context2.t0.resolve.call(_context2.t0, _context2.t4).then(_context2.t5);
case 9:
case "end":
return _context2.stop();
}
}
}, _marked, this);
}
<|start_filename|>test/fixtures/basic-import/expected.7.js<|end_filename|>
const testModule = Promise.resolve().then(() => babelHelpers.interopRequireWildcard(require('test-module')));
<|start_filename|>test/fixtures/import-with-comment/actual.js<|end_filename|>
import(/* test comment */ 'my-module');
import('my-module' /* test comment */ );
<|start_filename|>test/fixtures/template-argument/expected.7.js<|end_filename|>
Promise.resolve().then(() => babelHelpers.interopRequireWildcard(require(`1`)));
Promise.resolve(`${tag`2`}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
Promise.resolve(`3-${MODULE}`).then(s => babelHelpers.interopRequireWildcard(require(s)));
<|start_filename|>test/fixtures/template-argument/expected.6.noInterop.js<|end_filename|>
Promise.resolve().then(() => require(`1`));
Promise.resolve(`${tag`2`}`).then(s => require(s));
Promise.resolve(`3-${MODULE}`).then(s => require(s));
<|start_filename|>test/fixtures/dynamic-argument/expected.6.noInterop.js<|end_filename|>
Promise.resolve(`${MODULE}`).then(s => require(s));
let i = 0;
Promise.resolve(`${i++}`).then(s => require(s));
Promise.resolve(`${fn()}`).then(s => require(s));
async () => Promise.resolve(`${await "x"}`).then(s => require(s));
function* f() {
Promise.resolve(`${yield "x"}`).then(s => require(s));
}
<|start_filename|>src/utils.js<|end_filename|>
export function getImportSource(t, callNode) {
const importArguments = callNode.arguments;
const [importPath] = importArguments;
const isString = t.isStringLiteral(importPath) || t.isTemplateLiteral(importPath);
if (isString) {
t.removeComments(importPath);
return importPath;
}
return t.templateLiteral([
t.templateElement({ raw: '', cooked: '' }),
t.templateElement({ raw: '', cooked: '' }, true),
], importArguments);
}
export function createDynamicImportTransform({ template, types: t }) {
const builders = {
static: {
interop: template('Promise.resolve().then(() => INTEROP(require(SOURCE)))'),
noInterop: template('Promise.resolve().then(() => require(SOURCE))'),
},
dynamic: {
interop: template('Promise.resolve(SOURCE).then(s => INTEROP(require(s)))'),
noInterop: template('Promise.resolve(SOURCE).then(s => require(s))'),
},
};
const visited = typeof WeakSet === 'function' && new WeakSet();
const isString = (node) => t.isStringLiteral(node)
|| (t.isTemplateLiteral(node) && node.expressions.length === 0);
return (context, path) => {
if (visited) {
if (visited.has(path)) {
return;
}
visited.add(path);
}
const SOURCE = getImportSource(t, path.parent);
const builder = isString(SOURCE) ? builders.static : builders.dynamic;
const newImport = context.opts.noInterop
? builder.noInterop({ SOURCE })
: builder.interop({ SOURCE, INTEROP: context.addHelper('interopRequireWildcard') });
path.parentPath.replaceWith(newImport);
};
}
<|start_filename|>test/fixtures/import-with-comment/expected.7.js<|end_filename|>
Promise.resolve().then(() => babelHelpers.interopRequireWildcard(require('my-module')));
Promise.resolve().then(() => babelHelpers.interopRequireWildcard(require('my-module')));
<|start_filename|>test/fixtures/template-argument/expected.6.es2015.js<|end_filename|>
var _templateObject = babelHelpers.taggedTemplateLiteral(["2"], ["2"]);
Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require("1"));
});
Promise.resolve("" + String(tag(_templateObject))).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("3-" + String(MODULE)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
<|start_filename|>test/fixtures/template-argument/expected.7.es2015.js<|end_filename|>
function _templateObject() {
var data = babelHelpers.taggedTemplateLiteral(["2"]);
_templateObject = function _templateObject() {
return data;
};
return data;
}
Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require("1"));
});
Promise.resolve("".concat(tag(_templateObject()))).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("3-".concat(MODULE)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
<|start_filename|>test/fixtures/chained-import/expected.6.es2015.js<|end_filename|>
Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require('test-module'));
}).then(function () {
return Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require('test-module-2'));
});
});
Promise.all([Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require('test-1'));
}), Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require('test-2'));
}), Promise.resolve().then(function () {
return babelHelpers.interopRequireWildcard(require('test-3'));
})]).then(function () {});
<|start_filename|>test/fixtures/dynamic-argument/expected.7.es2015.js<|end_filename|>
var _marked = /*#__PURE__*/regeneratorRuntime.mark(f);
Promise.resolve("".concat(MODULE)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
var i = 0;
Promise.resolve("".concat(i++)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("".concat(fn())).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
/*#__PURE__*/
babelHelpers.asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee() {
return regeneratorRuntime.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.t0 = Promise;
_context.t1 = "";
_context.next = 4;
return "x";
case 4:
_context.t2 = _context.sent;
_context.t3 = _context.t1.concat.call(_context.t1, _context.t2);
return _context.abrupt("return", _context.t0.resolve.call(_context.t0, _context.t3).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
}));
case 7:
case "end":
return _context.stop();
}
}
}, _callee);
}));
function f() {
return regeneratorRuntime.wrap(function f$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
_context2.t0 = Promise;
_context2.t1 = "";
_context2.next = 4;
return "x";
case 4:
_context2.t2 = _context2.sent;
_context2.t3 = _context2.t1.concat.call(_context2.t1, _context2.t2);
_context2.t0.resolve.call(_context2.t0, _context2.t3).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
case 7:
case "end":
return _context2.stop();
}
}
}, _marked);
}
<|start_filename|>src/index.js<|end_filename|>
import { createDynamicImportTransform } from './utils';
export default function (api) {
const transformImport = createDynamicImportTransform(api);
return {
// NOTE: Once we drop support for Babel <= v6 we should
// update this to import from @babel/plugin-syntax-dynamic-import.
// https://www.npmjs.com/package/@babel/plugin-syntax-dynamic-import
manipulateOptions(opts, parserOpts) {
parserOpts.plugins.push('dynamicImport');
},
visitor: {
Import(path) {
transformImport(this, path);
},
},
};
}
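// A hedged usage sketch: enable the plugin from .babelrc; `noInterop` is the
// option exercised by the test fixtures above:
//   { "plugins": [["dynamic-import-node", { "noInterop": true }]] }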
<|start_filename|>test/fixtures/non-string-argument/expected.6.es2015.js<|end_filename|>
Promise.resolve('' + String({ 'answer': 42 })).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve('' + String(['foo', 'bar'])).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve('' + 42).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve('' + String(void 0)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve('' + String(undefined)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve('' + String(null)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve('' + String(true)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve('' + String(Symbol())).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
<|start_filename|>test/fixtures/non-string-argument/expected.7.es2015.js<|end_filename|>
Promise.resolve("".concat({
'answer': 42
})).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("".concat(['foo', 'bar'])).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("".concat(42)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("".concat(void 0)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("".concat(undefined)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("".concat(null)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("".concat(true)).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
Promise.resolve("".concat(Symbol())).then(function (s) {
return babelHelpers.interopRequireWildcard(require(s));
});
<|start_filename|>test/fixtures/nested-import/expected.7.js<|end_filename|>
function getModule(path) {
return Promise.resolve().then(() => babelHelpers.interopRequireWildcard(require('test-module')));
}
getModule().then(() => {});
| gururajrkatti/babel-plugin-dynamic-import-node |
<|start_filename|>RecastDemo/Contrib/metis/source/test/mtest.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* mtest.c
*
* This file is a comprehensive tester for all the graph partitioning/ordering
* routines of METIS
*
* Started 9/18/98
* George
*
* $Id: mtest.c,v 1.1 2002/08/10 04:34:53 karypis Exp $
*
*/
#include <metis.h>
#include <metislib.h>
#include "../libmetis/proto.h"
#include "proto.h"
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, nparts, options[10];
idxtype *part;
float lbvec[MAXNCON], rubvec[MAXNCON];
GraphType graph;
idxtype numflag = 0, wgtflag = 0, edgecut;
if (argc != 2) {
mprintf("Usage: %s <GraphFile>\n",argv[0]);
exit(0);
}
ReadGraph(&graph, argv[1], &wgtflag);
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Graph Information ---------------------------------------------------\n");
mprintf(" Name: %s, #Vertices: %D, #Edges: %D\n", argv[1], graph.nvtxs, graph.nedges/2);
Test_PartGraph(graph.nvtxs, graph.xadj, graph.adjncy);
Test_PartGraphV(graph.nvtxs, graph.xadj, graph.adjncy);
Test_PartGraphmC(graph.nvtxs, graph.xadj, graph.adjncy);
Test_ND(graph.nvtxs, graph.xadj, graph.adjncy);
mprintf("\n---------------------------------------------------------------------\n");
mprintf(" Testing completed.\n");
mprintf("**********************************************************************\n");
gk_free((void **)&graph.xadj, &graph.adjncy, &graph.vwgt, &graph.adjwgt, LTERM);
return 0;
}
/*************************************************************************
* This function tests the regular graph partitioning routines
**************************************************************************/
void Test_PartGraph(idxtype nvtxs, idxtype *xadj, idxtype *adjncy)
{
idxtype i, j, jj, k, tstnum, rcode;
idxtype nparts, numflag, wgtflag, edgecut, options[10];
idxtype *part, *vwgt, *adjwgt;
float tpwgts[256];
vwgt = idxmalloc(nvtxs, "vwgt");
for (i=0; i<nvtxs; i++)
vwgt[i] = RandomInRange(10);
adjwgt = idxmalloc(xadj[nvtxs], "adjwgt");
for (i=0; i<nvtxs; i++) {
for (j=xadj[i]; j<xadj[i+1]; j++) {
k = adjncy[j];
if (i < k) {
adjwgt[j] = 1+RandomInRange(5);
for (jj=xadj[k]; jj<xadj[k+1]; jj++) {
if (adjncy[jj] == i) {
adjwgt[jj] = adjwgt[j];
break;
}
}
}
}
}
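/* Each weight above is assigned once for i < k and then copied to the
 * symmetric entry (k,i), so the adjacency weights remain symmetric. */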
part = idxmalloc(nvtxs, "part");
tpwgts[0] = .1;
tpwgts[1] = .2;
tpwgts[2] = .3;
tpwgts[3] = .1;
tpwgts[4] = .05;
tpwgts[5] = .25;
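/* The six target partition weights above sum to 1.0; they are used by the
 * weighted (W*) partitioning tests below with nparts = 6. */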
/*===========================================================================*/
mprintf("\nTesting METIS_PartGraphRecursive ------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0; wgtflag = 0; nparts = 20;
options[0] = 0;
METIS_PartGraphRecursive(&nvtxs, xadj, adjncy, NULL, NULL, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, NULL, NULL, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 20;
options[0] = 0;
METIS_PartGraphRecursive(&nvtxs, xadj, adjncy, NULL, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, NULL, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 2; nparts = 20;
options[0] = 0;
METIS_PartGraphRecursive(&nvtxs, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, NULL, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 0;
METIS_PartGraphRecursive(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_PartGraphRecursive(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_PartGraphRecursive(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
/*===========================================================================*/
mprintf("\nTesting METIS_WPartGraphRecursive -----------------------------------\n ");
tstnum = 1;
/**/
numflag = 0; wgtflag = 0; nparts = 6;
options[0] = 0;
METIS_WPartGraphRecursive(&nvtxs, xadj, adjncy, NULL, NULL, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, NULL, NULL, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 6;
options[0] = 0;
METIS_WPartGraphRecursive(&nvtxs, xadj, adjncy, NULL, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, NULL, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 2; nparts = 6;
options[0] = 0;
METIS_WPartGraphRecursive(&nvtxs, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, NULL, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 0;
METIS_WPartGraphRecursive(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_WPartGraphRecursive(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_WPartGraphRecursive(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
/*===========================================================================*/
mprintf("\nTesting METIS_PartGraphKway -----------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0; wgtflag = 0; nparts = 20;
options[0] = 0;
METIS_PartGraphKway(&nvtxs, xadj, adjncy, NULL, NULL, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, NULL, NULL, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 20;
options[0] = 0;
METIS_PartGraphKway(&nvtxs, xadj, adjncy, NULL, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, NULL, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 2; nparts = 20;
options[0] = 0;
METIS_PartGraphKway(&nvtxs, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, NULL, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 0;
METIS_PartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_PartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_PartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 2; options[4] = 0;
METIS_PartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 3; options[4] = 0;
METIS_PartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
/*===========================================================================*/
mprintf("\nTesting METIS_WPartGraphKway ----------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0; wgtflag = 0; nparts = 6;
options[0] = 0;
METIS_WPartGraphKway(&nvtxs, xadj, adjncy, NULL, NULL, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, NULL, NULL, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 6;
options[0] = 0;
METIS_WPartGraphKway(&nvtxs, xadj, adjncy, NULL, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, NULL, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 2; nparts = 6;
options[0] = 0;
METIS_WPartGraphKway(&nvtxs, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, NULL, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 0;
METIS_WPartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_WPartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_WPartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 2; options[4] = 0;
METIS_WPartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 3; options[4] = 0;
METIS_WPartGraphKway(&nvtxs, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, tpwgts, options, &edgecut, part);
if ((rcode = VerifyWPart(nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
gk_free((void **)&vwgt, &adjwgt, &part, LTERM);
}
/*************************************************************************
* This function verifies that the partitioning was computed correctly
**************************************************************************/
int VerifyPart(idxtype nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt,
idxtype *adjwgt, idxtype nparts, idxtype edgecut, idxtype *part)
{
idxtype i, j, k, cut, vfree=0, efree=0, rcode=0;
idxtype *pwgts;
if (part[idxargmax(nvtxs, part)] != nparts-1)
return 1; /* the total number of partitions is different from nparts */
/* compute the cut and the pwgts */
if (vwgt == NULL) {
vfree = 1;
vwgt = idxsmalloc(nvtxs, 1, "vwgt");
}
if (adjwgt == NULL) {
efree = 1;
adjwgt = idxsmalloc(xadj[nvtxs], 1, "adjwgt");
}
pwgts = idxsmalloc(nparts, 0, "pwgts");
for (cut=0, i=0; i<nvtxs; i++) {
pwgts[part[i]] += vwgt[i];
for (j=xadj[i]; j<xadj[i+1]; j++)
cut += (part[i] != part[adjncy[j]] ? adjwgt[j] : 0);
}
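/* The loop above visits each edge from both of its endpoints, so the
 * computed cut must equal twice the edgecut reported by METIS. The check
 * after it allows the heaviest part to exceed the average part weight by
 * up to 10%. */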
if (cut != 2*edgecut)
rcode = 2;
if (nparts*pwgts[idxargmax(nparts, pwgts)] > 1.10*idxsum(nparts, pwgts, 1))
rcode = 3;
if (vfree)
gk_free((void **)&vwgt, LTERM);
if (efree)
gk_free((void **)&adjwgt, LTERM);
gk_free((void **)&pwgts, LTERM);
MALLOC_CHECK(NULL);
return rcode;
}
/*************************************************************************
* This function verifies that the partitioning was computed correctly
**************************************************************************/
int VerifyWPart(idxtype nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt,
idxtype *adjwgt, idxtype nparts, float *tpwgts, idxtype edgecut, idxtype *part)
{
idxtype i, j, k, tvwgt, cut, vfree=0, efree=0, rcode=0;
idxtype *pwgts;
if (part[idxargmax(nvtxs, part)] != nparts-1)
return 1; /* the total number of partitions is different from nparts */
/* compute the cut and the pwgts */
if (vwgt == NULL) {
vfree = 1;
vwgt = idxsmalloc(nvtxs, 1, "vwgt");
}
if (adjwgt == NULL) {
efree = 1;
adjwgt = idxsmalloc(xadj[nvtxs], 1, "adjwgt");
}
pwgts = idxsmalloc(nparts, 0, "pwgts");
for (cut=0, i=0; i<nvtxs; i++) {
pwgts[part[i]] += vwgt[i];
for (j=xadj[i]; j<xadj[i+1]; j++)
cut += (part[i] != part[adjncy[j]] ? adjwgt[j] : 0);
}
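/* As in VerifyPart, the cut is counted twice. Each part weight is then
 * checked against its target fraction tpwgts[i] with a 10% tolerance. */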
if (cut != 2*edgecut)
rcode = 2;
tvwgt = idxsum(nparts, pwgts, 1);
for (i=0; i<nparts; i++) {
if (pwgts[i] > 1.10*tpwgts[i]*tvwgt) {
rcode = 3;
break;
}
}
if (vfree)
gk_free((void **)&vwgt, LTERM);
if (efree)
gk_free((void **)&adjwgt, LTERM);
gk_free((void **)&pwgts, LTERM);
MALLOC_CHECK(NULL);
return rcode;
}
/*************************************************************************
* This function tests the volume-based graph partitioning routines
**************************************************************************/
void Test_PartGraphV(idxtype nvtxs, idxtype *xadj, idxtype *adjncy)
{
idxtype i, j, jj, k, tstnum, rcode;
idxtype nparts, numflag, wgtflag, totalv, options[10];
idxtype *part, *vwgt, *vsize;
float tpwgts[256];
vwgt = idxmalloc(nvtxs, "vwgt");
for (i=0; i<nvtxs; i++)
vwgt[i] = RandomInRange(10);
vsize = idxmalloc(nvtxs, "vsize");
for (i=0; i<nvtxs; i++)
vsize[i] = RandomInRange(10);
part = idxmalloc(nvtxs, "part");
tpwgts[0] = .1;
tpwgts[1] = .2;
tpwgts[2] = .3;
tpwgts[3] = .1;
tpwgts[4] = .05;
tpwgts[5] = .25;
/*===========================================================================*/
mprintf("\nTesting METIS_PartGraphVKway ----------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0; wgtflag = 0; nparts = 20;
options[0] = 0;
METIS_PartGraphVKway(&nvtxs, xadj, adjncy, NULL, NULL, &wgtflag, &numflag,
&nparts, options, &totalv, part);
if ((rcode = VerifyPartV(nvtxs, xadj, adjncy, NULL, NULL, nparts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 20;
options[0] = 0;
METIS_PartGraphVKway(&nvtxs, xadj, adjncy, NULL, vsize, &wgtflag, &numflag,
&nparts, options, &totalv, part);
if ((rcode = VerifyPartV(nvtxs, xadj, adjncy, NULL, vsize, nparts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 2; nparts = 20;
options[0] = 0;
METIS_PartGraphVKway(&nvtxs, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag,
&nparts, options, &totalv, part);
if ((rcode = VerifyPartV(nvtxs, xadj, adjncy, vwgt, NULL, nparts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 0;
METIS_PartGraphVKway(&nvtxs, xadj, adjncy, vwgt, vsize, &wgtflag, &numflag,
&nparts, options, &totalv, part);
if ((rcode = VerifyPartV(nvtxs, xadj, adjncy, vwgt, vsize, nparts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_PartGraphVKway(&nvtxs, xadj, adjncy, vwgt, vsize, &wgtflag, &numflag,
&nparts, options, &totalv, part);
if ((rcode = VerifyPartV(nvtxs, xadj, adjncy, vwgt, vsize, nparts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_PartGraphVKway(&nvtxs, xadj, adjncy, vwgt, vsize, &wgtflag, &numflag,
&nparts, options, &totalv, part);
if ((rcode = VerifyPartV(nvtxs, xadj, adjncy, vwgt, vsize, nparts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 20;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 3; options[4] = 0;
METIS_PartGraphVKway(&nvtxs, xadj, adjncy, vwgt, vsize, &wgtflag, &numflag,
&nparts, options, &totalv, part);
if ((rcode = VerifyPartV(nvtxs, xadj, adjncy, vwgt, vsize, nparts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
/*===========================================================================*/
mprintf("\nTesting METIS_WPartGraphVKway ---------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0; wgtflag = 0; nparts = 6;
options[0] = 0;
METIS_WPartGraphVKway(&nvtxs, xadj, adjncy, NULL, NULL, &wgtflag, &numflag,
&nparts, tpwgts, options, &totalv, part);
if ((rcode = VerifyWPartV(nvtxs, xadj, adjncy, NULL, NULL, nparts, tpwgts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 6;
options[0] = 0;
METIS_WPartGraphVKway(&nvtxs, xadj, adjncy, NULL, vsize, &wgtflag, &numflag,
&nparts, tpwgts, options, &totalv, part);
if ((rcode = VerifyWPartV(nvtxs, xadj, adjncy, NULL, vsize, nparts, tpwgts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 2; nparts = 6;
options[0] = 0;
METIS_WPartGraphVKway(&nvtxs, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag,
&nparts, tpwgts, options, &totalv, part);
if ((rcode = VerifyWPartV(nvtxs, xadj, adjncy, vwgt, NULL, nparts, tpwgts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 0;
METIS_WPartGraphVKway(&nvtxs, xadj, adjncy, vwgt, vsize, &wgtflag, &numflag,
&nparts, tpwgts, options, &totalv, part);
if ((rcode = VerifyWPartV(nvtxs, xadj, adjncy, vwgt, vsize, nparts, tpwgts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_WPartGraphVKway(&nvtxs, xadj, adjncy, vwgt, vsize, &wgtflag, &numflag,
&nparts, tpwgts, options, &totalv, part);
if ((rcode = VerifyWPartV(nvtxs, xadj, adjncy, vwgt, vsize, nparts, tpwgts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_WPartGraphVKway(&nvtxs, xadj, adjncy, vwgt, vsize, &wgtflag, &numflag,
&nparts, tpwgts, options, &totalv, part);
if ((rcode = VerifyWPartV(nvtxs, xadj, adjncy, vwgt, vsize, nparts, tpwgts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 3; nparts = 6;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 3; options[4] = 0;
METIS_WPartGraphVKway(&nvtxs, xadj, adjncy, vwgt, vsize, &wgtflag, &numflag,
&nparts, tpwgts, options, &totalv, part);
if ((rcode = VerifyWPartV(nvtxs, xadj, adjncy, vwgt, vsize, nparts, tpwgts, totalv, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
gk_free((void **)&vwgt, &vsize, &part, LTERM);
}
/*************************************************************************
* This function verifies that the partitioning was computed correctly
**************************************************************************/
int VerifyPartV(idxtype nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt,
idxtype *vsize, idxtype nparts, idxtype totalv, idxtype *part)
{
idxtype i, j, k, ttlv, vfree=0, efree=0, rcode=0;
idxtype *pwgts, *marker;
if (part[idxargmax(nvtxs, part)] != nparts-1)
return 1; /* the total number of partitions is different from nparts */
/* compute the total communication volume and the pwgts */
if (vwgt == NULL) {
vfree = 1;
vwgt = idxsmalloc(nvtxs, 1, "vwgt");
}
if (vsize == NULL) {
efree = 1;
vsize = idxsmalloc(nvtxs, 1, "vsize");
}
pwgts = idxsmalloc(nparts, 0, "pwgts");
marker = idxsmalloc(nparts, -1, "htable");
for (ttlv=0, i=0; i<nvtxs; i++) {
pwgts[part[i]] += vwgt[i];
marker[part[i]] = i;
for (j=xadj[i]; j<xadj[i+1]; j++) {
if (marker[part[adjncy[j]]] != i) {
ttlv += vsize[i];
marker[part[adjncy[j]]] = i;
}
}
}
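/* The marker array guarantees that vsize[i] is added at most once per
 * partition that is adjacent to vertex i, which matches the definition
 * of the total communication volume. */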
if (ttlv != totalv)
rcode = 2;
if (nparts*pwgts[idxargmax(nparts, pwgts)] > 1.05*idxsum(nparts, pwgts, 1))
rcode = 3;
if (vfree)
gk_free((void **)&vwgt, LTERM);
if (efree)
gk_free((void **)&vsize, LTERM);
gk_free((void **)&pwgts, &marker, LTERM);
MALLOC_CHECK(NULL);
return rcode;
}
/*************************************************************************
* This function verifies that the partitioning was computed correctly
**************************************************************************/
int VerifyWPartV(idxtype nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt,
idxtype *vsize, idxtype nparts, float *tpwgts, idxtype totalv,
idxtype *part)
{
idxtype i, j, k, tvwgt, ttlv, vfree=0, efree=0, rcode=0;
idxtype *pwgts, *marker;
if (part[idxargmax(nvtxs, part)] != nparts-1)
return 1; /* the total number of partitions is different from nparts */
/* compute the total communication volume and the pwgts */
if (vwgt == NULL) {
vfree = 1;
vwgt = idxsmalloc(nvtxs, 1, "vwgt");
}
if (vsize == NULL) {
efree = 1;
vsize = idxsmalloc(nvtxs, 1, "vsize");
}
pwgts = idxsmalloc(nparts, 0, "pwgts");
marker = idxsmalloc(nparts, -1, "htable");
for (ttlv=0, i=0; i<nvtxs; i++) {
pwgts[part[i]] += vwgt[i];
marker[part[i]] = i;
for (j=xadj[i]; j<xadj[i+1]; j++) {
if (marker[part[adjncy[j]]] != i) {
ttlv += vsize[i];
marker[part[adjncy[j]]] = i;
}
}
}
if (ttlv != totalv)
rcode = 2;
tvwgt = idxsum(nparts, pwgts, 1);
for (i=0; i<nparts; i++) {
if (pwgts[i] > 1.05*tpwgts[i]*tvwgt) {
rcode = 3;
break;
}
}
if (vfree)
gk_free((void **)&vwgt, LTERM);
if (efree)
gk_free((void **)&vsize, LTERM);
gk_free((void **)&pwgts, &marker, LTERM);
MALLOC_CHECK(NULL);
return rcode;
}
/*************************************************************************
* This function tests the multi-constraint graph partitioning routines
**************************************************************************/
void Test_PartGraphmC(idxtype nvtxs, idxtype *xadj, idxtype *adjncy)
{
idxtype i, j, jj, k, tstnum, rcode;
idxtype nparts, ncon, numflag, wgtflag, edgecut, options[10];
idxtype *part, *vwgt, *adjwgt;
float ubvec[MAXNCON];
ncon = 3;
vwgt = idxmalloc(nvtxs*ncon, "vwgt");
for (i=0; i<ncon*nvtxs; i++)
vwgt[i] = RandomInRange(10);
adjwgt = idxmalloc(xadj[nvtxs], "adjwgt");
for (i=0; i<nvtxs; i++) {
for (j=xadj[i]; j<xadj[i+1]; j++) {
k = adjncy[j];
if (i < k) {
adjwgt[j] = 1+RandomInRange(5);
for (jj=xadj[k]; jj<xadj[k+1]; jj++) {
if (adjncy[jj] == i) {
adjwgt[jj] = adjwgt[j];
break;
}
}
}
}
}
part = idxmalloc(nvtxs, "part");
/*===========================================================================*/
mprintf("\nTesting METIS_mCPartGraphRecursive ----------------------------------\n ");
tstnum = 1;
/**/
numflag = 0; wgtflag = 0; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, NULL, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 4; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 5; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 6; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 7; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 8; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n ");
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 4; options[2] = 2; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 3; options[2] = 2; options[3] = 1; options[4] = 0;
METIS_mCPartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
mprintf("\nTesting METIS_mCPartGraphKway ---------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0; wgtflag = 0; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, NULL, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 4; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 5; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 6; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 7; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 8; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n ");
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 6; options[2] = 2; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.5; ubvec[2] = 1.05;
options[0] = 1; options[1] = 4; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 1.05; ubvec[1] = 1.5; ubvec[2] = 1.5;
options[0] = 1; options[1] = 3; options[2] = 2; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0; wgtflag = 1; nparts = 10;
ubvec[0] = 2.05; ubvec[1] = 1.05; ubvec[2] = 1.05;
options[0] = 1; options[1] = 4; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_mCPartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag,
&nparts, ubvec, options, &edgecut, part);
if ((rcode = VerifyPartmC(nvtxs, ncon, xadj, adjncy, vwgt, adjwgt, nparts, ubvec, edgecut, part)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
gk_free((void **)&vwgt, &adjwgt, &part, LTERM);
}
/*************************************************************************
* This function verifies that the partitioning was computed correctly
**************************************************************************/
int VerifyPartmC(idxtype nvtxs, idxtype ncon, idxtype *xadj, idxtype *adjncy, idxtype *vwgt,
idxtype *adjwgt, idxtype nparts, float *ubvec, idxtype edgecut, idxtype *part)
{
idxtype i, j, k, cut, efree=0, rcode=0;
idxtype *pwgts;
float lb;
if (part[idxargmax(nvtxs, part)] != nparts-1)
return 1; /* the total number of partitions is different from nparts */
if (adjwgt == NULL) {
efree = 1;
adjwgt = idxsmalloc(xadj[nvtxs], 1, "adjwgt");
}
pwgts = idxsmalloc(ncon*nparts, 0, "pwgts");
for (cut=0, i=0; i<nvtxs; i++) {
for (j=0; j<ncon; j++)
pwgts[part[i]*ncon+j] += vwgt[i*ncon+j];
for (j=xadj[i]; j<xadj[i+1]; j++)
cut += (part[i] != part[adjncy[j]] ? adjwgt[j] : 0);
}
if (cut != 2*edgecut)
rcode = 2;
/*
mprintf("\n");
for (i=0; i<nparts; i++) {
for (j=0; j<ncon; j++)
mprintf("%5D ", pwgts[i*ncon+j]);
mprintf("\n");
}
mprintf("---------------------------------\n");
for (j=0; j<ncon; j++)
mprintf("%5D ", idxsum(nparts, pwgts+j, ncon));
mprintf("\n---------------------------------\n");
for (j=0; j<ncon; j++)
mprintf("%5D ", pwgts[ncon*idxargmax_strd(nparts, pwgts+j, ncon)+j]);
mprintf("\n%D %D\n", idxsum(ncon*nvtxs, vwgt, 1), idxsum(ncon*nparts, pwgts, 1));
*/
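/* For each of the ncon constraints, the load imbalance is the ratio of the
 * heaviest part weight to the average part weight; it must not exceed the
 * corresponding entry of ubvec. */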
for (i=0; i<ncon; i++) {
lb = 1.0*nparts*pwgts[ncon*idxargmax_strd(nparts, pwgts+i, ncon)+i]/(1.0*idxsum(nparts, pwgts+i, ncon));
/*mprintf("[%3.2f]", lb);*/
if (lb > ubvec[i])
rcode = 3;
}
if (efree)
gk_free((void **)&adjwgt, LTERM);
gk_free((void **)&pwgts, LTERM);
MALLOC_CHECK(NULL);
return rcode;
}
/*************************************************************************
* This function tests the graph ordering (nested dissection) routines
**************************************************************************/
void Test_ND(idxtype nvtxs, idxtype *xadj, idxtype *adjncy)
{
idxtype i, j, jj, k, tstnum, rcode;
idxtype numflag, wgtflag, options[10];
idxtype *perm, *iperm, *vwgt;
vwgt = idxmalloc(nvtxs, "vwgt");
for (i=0; i<nvtxs; i++)
vwgt[i] = 1+RandomInRange(10);
perm = idxmalloc(nvtxs, "perm");
iperm = idxmalloc(nvtxs, "iperm");
/*===========================================================================*/
mprintf("\nTesting METIS_EdgeND ------------------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0;
options[0] = 0;
METIS_EdgeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_EdgeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_EdgeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
/*===========================================================================*/
mprintf("\nTesting METIS_NodeND ------------------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0;
options[0] = 0;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
options[5] = 0; options[6] = 0; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
options[5] = 0; options[6] = 0; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 1; options[4] = 0;
options[5] = 0; options[6] = 0; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 2; options[3] = 1; options[4] = 0;
options[5] = 0; options[6] = 0; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
options[5] = 0; options[6] = 0; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
options[5] = 1; options[6] = 0; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
options[5] = 2; options[6] = 0; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
options[5] = 3; options[6] = 0; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
options[5] = 3; options[6] = 40; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n ");
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
options[5] = 3; options[6] = 20; options[7] = 1;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
options[5] = 3; options[6] = 20; options[7] = 2;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
options[5] = 0; options[6] = 0; options[7] = 2;
METIS_NodeND(&nvtxs, xadj, adjncy, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
/*===========================================================================*/
mprintf("\nTesting METIS_NodeWND -----------------------------------------------\n ");
tstnum = 1;
/**/
numflag = 0;
options[0] = 0;
METIS_NodeWND(&nvtxs, xadj, adjncy, vwgt, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 1; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_NodeWND(&nvtxs, xadj, adjncy, vwgt, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 2; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_NodeWND(&nvtxs, xadj, adjncy, vwgt, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 1; options[4] = 0;
METIS_NodeWND(&nvtxs, xadj, adjncy, vwgt, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 2; options[3] = 1; options[4] = 0;
METIS_NodeWND(&nvtxs, xadj, adjncy, vwgt, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
/**/
numflag = 0;
options[0] = 1; options[1] = 3; options[2] = 1; options[3] = 2; options[4] = 0;
METIS_NodeWND(&nvtxs, xadj, adjncy, vwgt, &numflag, options, perm, iperm);
if ((rcode = VerifyND(nvtxs, perm, iperm)) == 0)
mprintf("[%D:ok]", tstnum++);
else
mprintf("[%D:err-%D]", tstnum++, rcode);
fflush(stdout);
mprintf("\n");
gk_free((void **)&vwgt, &perm, &iperm, LTERM);
}
/*************************************************************************
* This function verifies that the ordering was computed correctly
**************************************************************************/
int VerifyND(idxtype nvtxs, idxtype *perm, idxtype *iperm)
{
idxtype i, j, k, rcode=0;
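/* perm and iperm must be inverse permutations of each other. */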
for (i=0; i<nvtxs; i++) {
if (i != perm[iperm[i]])
rcode = 1;
}
for (i=0; i<nvtxs; i++) {
if (i != iperm[perm[i]])
rcode = 2;
}
MALLOC_CHECK(NULL);
return rcode;
}
<|start_filename|>HierarchicalPathfinding/Include/Navigation.h<|end_filename|>
#include <algorithm>
#include "Graph.h"
#include "DetourNavMesh.h"
#include "Recast.h"
#include "DetourStatus.h"
#include "DetourNavMeshQuery.h"
#include "DetourNode.h"
#include "DetourAssert.h"
#include "RecastAssert.h"
#include <fstream>
extern "C" {
#include "metis.h"
}
typedef int idxtype;
class Navigation
{
public:
void createHierarchicalGraph(int p_levels,int p_level,int p_mergedPolys,rcContext* ctx, const dtMeshTile* ptile, const dtNavMesh* pm_navMesh, const dtNavMeshQuery* pm_navQuery, std::map<dtPolyRef, int> &nodesInCluster);
dtStatus findPathNav(rcContext* ctx, dtPolyRef startRef, dtPolyRef endRef, const float* startPos, const float* endPos, const dtQueryFilter* filter, dtPolyRef* path, int &pathCount, const int maxPath);
private:
static const int MAX_POLYS = 256;
const dtMeshTile* tile;
const dtNavMesh* m_navMesh;
const dtNavMeshQuery* m_navQuery;
int numParts;
dtPolyRef refBase;
int levels;
int level;
int maxPolyInNode;
class dtNodePool* m_nodePool; ///< Pointer to node pool.
class dtNodeQueue* m_openList; ///< Pointer to open list queue.
Graph mainGraph;
Graph currentGraph;
Graph parentGraph;
int numTotalEdges;
int numLevel;
std::map<dtPolyRef, dtPolyRef> nodeCluster;
std::multimap<dtPolyRef, dtPolyRef> clusterNode;
Graph *graphs;
int numGraphs;
void mergeNodes();
void buildHierarchy();
void buildNodes();
void buildEdges();
void init();
dtStatus findHierarchicalPath(dtPolyRef startRef, dtPolyRef endRef, int startIdPos, int endIdPos, const float* startPos, const float* endPos, dtPolyRef* tempPathNodes,int* tempPathPolys, int *nTempPath, const int maxPath);
float findPathLocal(dtPolyRef startRef, dtPolyRef endRef,int startIdPos, int endIdPos, const float* startPos, const float* endPos, dtPolyRef* path, int &pathCount, const int maxPath, std::vector<dtPolyRef> subGraphNodes);
float findPath(dtPolyRef startRef, dtPolyRef endRef, const float* startPos, const float* endPos, dtPolyRef* path, int &pathCount, const int maxPath);
Graph::Node * getNode(dtPolyRef ref, int l);
void getPath(int fromPosId, int toPosId, Graph::Node *node, int l, dtPolyRef* tempPath, int &nTempPath);
void setGraph();
float getCost(const Graph::Node *node, int startPosId, int endPosId, const float * startPos, const float * endPos);
void linkStartToGraph(Graph::Node *node, dtPolyRef ref, const float *pos, int startIdPos);
void linkEndToGraph(Graph::Node *node, dtPolyRef ref, const float *pos, int startIdPos);
void checkPartition(int* part, const int numNodes, const int numParts);
void explorePartition(int idNode, int* newPart, int* part);
};
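// A minimal usage sketch (hypothetical; it assumes a fully built
// dtNavMesh/dtNavMeshQuery pair, and every variable name below is
// illustrative rather than part of this API):
//
//   Navigation nav;
//   std::map<dtPolyRef, int> nodesInCluster;
//   nav.createHierarchicalGraph(levels, level, mergedPolys, ctx, tile,
//                               navMesh, navQuery, nodesInCluster);
//
//   dtPolyRef path[256];
//   int pathCount = 0;
//   dtStatus status = nav.findPathNav(ctx, startRef, endRef, startPos,
//                                     endPos, &filter, path, pathCount, 256);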
<|start_filename|>RecastDemo/Contrib/metis/source/GKlib/trunk/gk_externs.h<|end_filename|>
/*!
\file gk_externs.h
\brief This file contains definitions of external variables created by GKlib
\date Started 3/27/2007
\author George
\version\verbatim $Id: gk_externs.h 1277 2007-03-27 21:17:33Z karypis $ \endverbatim
*/
#ifndef _GK_EXTERNS_H_
#define _GK_EXTERNS_H_
/*************************************************************************
* Extern variable declarations. The __thread qualifier is intended to make them thread-safe.
**************************************************************************/
#ifndef _GK_ERROR_C_
extern __thread jmp_buf gk_return_to_entry; /* defined in error.c */
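/* Presumed usage (an assumption; the actual error handler lives in error.c):
 a library entry point calls setjmp(gk_return_to_entry), and the error
 routine calls longjmp(gk_return_to_entry, ...) to unwind back to that
 entry point on failure. */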
#endif
#endif
<|start_filename|>RecastDemo/Contrib/metis/source/config/config.h<|end_filename|>
#ifndef CONFIG_H
#define CONFIG_H 1
/****************************************************************************
* A set of defines that can be modified by the user
*****************************************************************************/
/*--------------------------------------------------------------------------
Specifies the width of the elementary data type that will hold information
about vertices and their adjacency lists.
Possible values:
32 : Use 32 bit signed integers
64 : Use 64 bit signed integers
A width of 64 should be specified if the number of vertices or the total
number of edges in the graph exceeds the limit of a 32 bit signed integer,
i.e., 2^31-1.
Proper use of 64 bit integers requires that the C99 standard datatypes
int32_t and int64_t are supported by the compiler.
GCC provides these definitions in stdint.h, but other compilers and
architectures may require some modifications.
--------------------------------------------------------------------------*/
#define IDXTYPEWIDTH 32
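/* A sketch of how IDXTYPEWIDTH is presumably consumed elsewhere (an
 assumption; the actual typedef lives in the METIS headers):
 #if IDXTYPEWIDTH == 32
 typedef int32_t idxtype;
 #else
 typedef int64_t idxtype;
 #endif
*/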
/*--------------------------------------------------------------------------
Specifies whether the compiler supports the __thread storage-class
specifier for thread-local storage. This specifier is available on most
systems that use the gcc compiler, but it may not be available on other
systems.
Possible values:
0 : Not available; thread-local storage will not be used
1 : Available; the __thread modifier will be used
--------------------------------------------------------------------------*/
#define HAVE_THREADLOCALSTORAGE 0
/****************************************************************************
* Do not change anything below this point
*****************************************************************************/
/* Uniform defines for various compilers */
#if defined(_MSC_VER)
#define COMPILER_MSC
#endif
#if defined(__ICC)
#define COMPILER_ICC
#endif
#if defined(__GNUC__)
#define COMPILER_GCC
#endif
#if defined(COMPILER_GCC)
#include <stdint.h>
#endif
#if defined(COMPILER_MSC)
#include <crtdefs.h>
#define __thread __declspec( thread )
typedef __int32 int32_t;
typedef __int64 int64_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int64 uint64_t;
#endif
#if defined(UNIX)
#include <getopt.h>
#include <sys/time.h>
#include <sys/resource.h>
#endif
#if defined(ENABLE_OPENMP)
#include <omp.h>
#endif
#endif /* CONFIG_H */
<|start_filename|>RecastDemo/Contrib/metis/source/programs/Makefile<|end_filename|>
include ../Makefile.in
CURBUILDDIR = $(PRGBUILDDIR)
METISSRC = metis.c io.c smbfactor.c
PMETISSRC = pmetis.c io.c cmdline_pmetis.c
KMETISSRC = kmetis.c io.c
OEMETISSRC = oemetis.c io.c smbfactor.c
ONMETISSRC = onmetis.c io.c smbfactor.c
MESH2DUALSRC = mesh2dual.c io.c
MESH2NODALSRC = mesh2nodal.c io.c
PARTDMESHSRC = partdmesh.c io.c
PARTNMESHSRC = partnmesh.c io.c
GRAPHCHKSRC = graphchk.c io.c
KFMETISSRC = kfmetis.c io.c cmdline_kfmetis.c smbfactor.c
CMETISSRC = cmetis.c io.c cmdline_cmetis.c
METISOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(METISSRC))
PMETISOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(PMETISSRC))
KMETISOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(KMETISSRC))
OEMETISOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(OEMETISSRC))
ONMETISOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(ONMETISSRC))
MESH2DUALOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(MESH2DUALSRC))
MESH2NODALOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(MESH2NODALSRC))
PARTDMESHOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(PARTDMESHSRC))
PARTNMESHOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(PARTNMESHSRC))
GRAPHCHKOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(GRAPHCHKSRC))
KFMETISOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(KFMETISSRC))
CMETISOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(CMETISSRC))
ALLOBJS = $(patsubst %.c, $(CURBUILDDIR)/%$(OBJEXT), $(wildcard *.c))
HEADERS = $(wildcard *.h) $(TOPDIR)/include/metis.h $(wildcard $(GKLIBINCDIR)/*.h)
LIBRARIES = $(BUILDDIR)/libmetis$(LIBEXT)
TARGETS = $(BUILDDIR)/metis$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/pmetis$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/kmetis$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/oemetis$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/onmetis$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/mesh2dual$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/mesh2nodal$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/partdmesh$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/partnmesh$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/graphchk$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/kfmetis$(VERNUM)$(EXEEXT) \
$(BUILDDIR)/cmetis$(VERNUM)$(EXEEXT)
default: $(TARGETS)
$(BUILDDIR)/metis$(VERNUM)$(EXEEXT): $(METISOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(METISOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/pmetis$(VERNUM)$(EXEEXT): $(PMETISOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(PMETISOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/kmetis$(VERNUM)$(EXEEXT): $(KMETISOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(KMETISOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/oemetis$(VERNUM)$(EXEEXT): $(OEMETISOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(OEMETISOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/onmetis$(VERNUM)$(EXEEXT): $(ONMETISOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(ONMETISOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/mesh2dual$(VERNUM)$(EXEEXT): $(MESH2DUALOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(MESH2DUALOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/mesh2nodal$(VERNUM)$(EXEEXT): $(MESH2NODALOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(MESH2NODALOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/partdmesh$(VERNUM)$(EXEEXT): $(PARTDMESHOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(PARTDMESHOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/partnmesh$(VERNUM)$(EXEEXT): $(PARTNMESHOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(PARTNMESHOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/graphchk$(VERNUM)$(EXEEXT): $(GRAPHCHKOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(GRAPHCHKOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/kfmetis$(VERNUM)$(EXEEXT): $(KFMETISOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(KFMETISOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/cmetis$(VERNUM)$(EXEEXT): $(CMETISOBJS) $(LIBRARIES)
$(LD) $(LDOPTIONS) $(EXEOUTPUTFILE) $(CMETISOBJS) $(LIBSDIR) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/cepic$(VERNUM)$(EXEEXT): $(CEPICOBJS) $(LIBRARIES)
$(LD) -o $@ $(CEPICOBJS) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/cepic-rcb$(VERNUM)$(EXEEXT): $(CEPICRCBOBJS) $(LIBRARIES)
$(LD) -o $@ $(CEPICRCBOBJS) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
$(BUILDDIR)/mmetis$(VERNUM)$(EXEEXT): $(MMETISOBJS) $(LIBRARIES)
$(LD) -o $@ $(MMETISOBJS) $(LIBS)
chmod 744 $@
@if [ "$(BINDIR)" ]; then cp $@* $(BINDIR); fi
clean:
rm -f $(ALLOBJS)
realclean:
rm -f $(ALLOBJS) $(TARGETS)
$(ALLOBJS) : $(HEADERS) ../Makefile.in Makefile
$(CURBUILDDIR)/%$(OBJEXT) : %.c
$(CC) $(CFLAGS) $(SOURCEFILE) $(OUTPUTFILE)
<|start_filename|>RecastDemo/Contrib/metis/source/programs/pmetis.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* pmetis.c
*
* This file contains the driving routine for the multilevel method
*
* Started 8/28/94
* George
*
* $Id: pmetis.c,v 1.3 2002/08/10 06:57:51 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, options[10];
idxtype *part;
float lbvec[MAXNCON];
GraphType graph;
idxtype numflag = 0, wgtflag = 0, edgecut;
ParamType params;
double TOTALTmr, METISTmr, IOTmr;
parse_cmdline(&params, argc, argv);
if (params.nparts < 2) {
mprintf("The number of partitions should be greater than 1!\n");
exit(0);
}
gk_clearcputimer(TOTALTmr);
gk_clearcputimer(METISTmr);
gk_clearcputimer(IOTmr);
gk_startcputimer(TOTALTmr);
gk_startcputimer(IOTmr);
ReadGraph(&graph, params.filename, &wgtflag);
if (graph.nvtxs <= 0) {
mprintf("Empty graph. Nothing to do.\n");
exit(0);
}
gk_stopcputimer(IOTmr);
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Graph Information ---------------------------------------------------\n");
mprintf(" Name: %s, #Vertices: %D, #Edges: %D, #Parts: %D\n", params.filename, graph.nvtxs, graph.nedges/2, params.nparts);
if (graph.ncon > 1)
mprintf(" Balancing Constraints: %D\n", graph.ncon);
mprintf("\nRecursive Partitioning... -------------------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
options[0] = 1;  /* interpret the user-supplied option values set below */
options[OPTION_CTYPE] = params.mtype;
options[OPTION_ITYPE] = params.itype;
options[OPTION_RTYPE] = params.rtype;
options[OPTION_DBGLVL] = params.dbglvl;
gk_startcputimer(METISTmr);
if (graph.ncon == 1) {
METIS_PartGraphRecursive(&graph.nvtxs, graph.xadj, graph.adjncy, graph.vwgt, graph.adjwgt,
&wgtflag, &numflag, &(params.nparts), options, &edgecut, part);
}
else {
METIS_mCPartGraphRecursive(&graph.nvtxs, &graph.ncon, graph.xadj, graph.adjncy, graph.vwgt,
graph.adjwgt, &wgtflag, &numflag, &(params.nparts), options, &edgecut, part);
}
gk_stopcputimer(METISTmr);
ComputePartitionBalance(&graph, params.nparts, part, lbvec);
mprintf(" %D-way Edge-Cut: %7D, Balance: ", params.nparts, edgecut);
for (i=0; i<graph.ncon; i++)
mprintf("%5.2f ", lbvec[i]);
mprintf("\n");
gk_startcputimer(IOTmr);
WritePartition(params.filename, part, graph.nvtxs, params.nparts);
gk_stopcputimer(IOTmr);
gk_stopcputimer(TOTALTmr);
mprintf("\nTiming Information --------------------------------------------------\n");
mprintf(" I/O: \t\t %7.3f\n", gk_getcputimer(IOTmr));
mprintf(" Partitioning: \t\t %7.3f (PMETIS time)\n", gk_getcputimer(METISTmr));
mprintf(" Total: \t\t %7.3f\n", gk_getcputimer(TOTALTmr));
mprintf("**********************************************************************\n");
gk_free((void **)&graph.xadj, &graph.adjncy, &graph.vwgt, &graph.adjwgt, &part, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/struct.h<|end_filename|>
/*
* struct.h
*
* This file contains data structures for the various programs of METIS.
*
* Started 8/9/02
* George
*
* $Id: struct.h,v 1.8 2003/04/04 23:22:50 karypis Exp $
*/
#ifndef _STRUCTBIN_H_
#define _STRUCTBIN_H_
/*************************************************************************
* The following data structure implements a string-2-idxtype mapping
* table used for parsing command-line options
**************************************************************************/
typedef struct {
char *name;
idxtype id;
} StringMapType;
/*************************************************************************
* This data structure stores the various command line arguments
**************************************************************************/
typedef struct {
idxtype mtype;
idxtype itype;
idxtype rtype;
idxtype balance;
idxtype ntrials;
idxtype niter;
idxtype seed;
idxtype dbglvl;
idxtype nparts;
char *filename;
char *xyzfilename;
char *tpwgts;
float iotimer;
float clustertimer;
float reporttimer;
} ParamType;
#endif
<|start_filename|>RecastDemo/Contrib/metis/source/programs/proto.h<|end_filename|>
/*
* proto.h
*
* This file contains function prototypes
*
* Started 11/1/99
* George
*
* $Id: proto.h,v 1.15 2003/04/04 23:22:49 karypis Exp $
*
*/
#ifndef _PROTOBIN_H_
#define _PROTOBIN_H_
/* io.c */
void ReadGraph(GraphType *, char *, idxtype *);
void ReadCoordinates(GraphType *, char *);
void WritePartition(char *, idxtype *, idxtype, idxtype);
void WriteMeshPartition(char *, idxtype, idxtype, idxtype *, idxtype, idxtype *);
void WritePermutation(char *, idxtype *, idxtype);
int CheckGraph(GraphType *);
int MeshType(char *);
idxtype *ReadWgt(char *, idxtype *, idxtype *, idxtype *);
idxtype *ReadMesh(char *, idxtype *, idxtype *, idxtype *);
idxtype *ReadMeshWgt(char *, idxtype *, idxtype *, idxtype *, idxtype *);
idxtype *ReadMixedMesh(char *, idxtype *, idxtype *, idxtype *);
idxtype *ReadMixedMeshWgt(char *, idxtype *, idxtype *, idxtype *, idxtype *);
void WriteGraph(char *, idxtype, idxtype *, idxtype *);
idxtype MixedElements(char *);
idxtype *ReadMgcnums(char *);
void WriteWgtGraph(char *, idxtype , idxtype *, idxtype *, idxtype *);
/* smbfactor.c */
void ComputeFillIn(GraphType *, idxtype *);
idxtype ComputeFillIn2(GraphType *, idxtype *);
idxtype smbfct(idxtype, idxtype *, idxtype *, idxtype *, idxtype *, idxtype *, idxtype *, idxtype *, idxtype *, idxtype *);
/* cmdline.c */
void parse_cmdline(ParamType *params, int argc, char *argv[]);
/* cpmetis.c */
#endif
<|start_filename|>HierarchicalPathfinding/Include/Graph.h<|end_filename|>
#ifndef __GRAPH_H__
#define __GRAPH_H__
#include "DetourCommon.h"
#include "DetourAlloc.h"
#include <vector>
#include <map>
const int maxInternalPath = 256;
const int maxNodes = 2048;
static const float H_SCALE = 0.999f;
class Graph{
typedef unsigned int dtPolyRef;
public:
struct IntraEdge
{
int startPosId;
int endPosId;
int nPath;
float cost;
dtPolyRef path[maxInternalPath];
};
struct Edge
{
dtPolyRef targetNodeId;
int idPos;
int idPoly;
float pos[3];
};
struct Node
{
dtPolyRef idNode;
dtPolyRef idParent;
Edge *edges;
unsigned int numEdges;
IntraEdge *intraEdges;
unsigned int numIntraEdges;
void InitEdge(int maxEdgesPerNode)
{
numEdges = 0;
edges = 0;
edges = (Edge*)dtAlloc(sizeof(Edge)*maxEdgesPerNode, DT_ALLOC_PERM);
memset(edges, 0, sizeof(Edge)*maxEdgesPerNode);
}
void DestroyEdge()
{
dtFree(edges);
numEdges = 0;
}
void InitIntraEdge()
{
int maxIntraEdgesPerNode = numEdges * numEdges;
if(maxIntraEdgesPerNode > 0)
{
numIntraEdges = 0;
intraEdges = 0;
intraEdges = (IntraEdge*)dtAlloc(sizeof(IntraEdge)*maxIntraEdgesPerNode, DT_ALLOC_PERM);
memset(intraEdges, 0, sizeof(IntraEdge)*maxIntraEdgesPerNode);
}
}
void DestroyIntraEdge()
{
dtFree(intraEdges);
numIntraEdges = 0;
}
};
Node* nodes;
int numNodes;
void AddEdge(dtPolyRef sourceIdNode, dtPolyRef targetIdNode, float* pos, int idPos, int idPoly)
{
Node *node = &nodes[sourceIdNode];
Edge *edge = 0;
edge = &node->edges[node->numEdges++];
edge->targetNodeId = targetIdNode;
edge->idPos = idPos;
edge->idPoly = idPoly;
dtVcopy(edge->pos, pos);
}
void AddNode(int nodeId)
{
Node *node = 0;
node = &nodes[numNodes++];
node->idNode = nodeId;
}
void AddIntraEdge(dtPolyRef clusterId, int startPosId, int endPosId, float cost, dtPolyRef* path, int nPath)
{
Node *node = &nodes[clusterId];
IntraEdge *intraEdge = 0;
intraEdge = &node->intraEdges[node->numIntraEdges++];
intraEdge->startPosId = startPosId;
intraEdge->endPosId = endPosId;
intraEdge->cost = cost;
intraEdge->nPath = nPath;
memcpy(intraEdge->path, path, sizeof(dtPolyRef)*nPath);
}
void AddParent(dtPolyRef nodeId, dtPolyRef idParent)
{
Node *node = &nodes[nodeId];
node->idParent = idParent;
}
void Init(int numMaxNodes)
{
numNodes = 0;
nodes = 0;
nodes = (Node*)dtAlloc(sizeof(Node)*numMaxNodes, DT_ALLOC_PERM);
memset(nodes, 0, sizeof(Node)*numMaxNodes);
}
void InitEdge(int nodeId, int maxEdges)
{
Node *node = &nodes[nodeId];
node->InitEdge(maxEdges);
}
void Destroy()
{
dtFree(nodes);
numNodes = 0;
}
};
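/* Minimal usage sketch (illustrative values only):

     Graph g;
     g.Init(4);                          // room for up to 4 nodes
     g.AddNode(0); g.AddNode(1);
     g.InitEdge(0, 2);                   // reserve 2 edge slots on node 0
     g.InitEdge(1, 2);
     float mid[3] = {1.0f, 0.0f, 2.0f};  // portal midpoint between the nodes
     g.AddEdge(0, 1, mid, 1, 0);         // register the edge in both
     g.AddEdge(1, 0, mid, 1, 1);         // directions
     g.nodes[0].InitIntraEdge();         // numEdges^2 intra-edge slots
     g.nodes[0].DestroyIntraEdge();
     g.nodes[0].DestroyEdge();
     g.Destroy();
*/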
#endif // __GRAPH_H__
<|start_filename|>RecastDemo/Contrib/metis/source/programs/graphchk.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* graphchk.c
*
* This file checks the validity of a graph
*
* Started 8/28/94
* George
*
* $Id: graphchk.c,v 1.2 2002/08/10 06:02:53 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
GraphType graph;
char filename[256];
idxtype wgtflag;
if (argc != 2) {
mprintf("Usage: %s <GraphFile>\n", argv[0]);
exit(0);
}
strcpy(filename, argv[1]);
ReadGraph(&graph, filename, &wgtflag);
if (graph.nvtxs == 0) {
mprintf("Empty graph!\n");
exit(0);
}
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Graph Information ---------------------------------------------------\n");
mprintf(" Name: %s, #Vertices: %D, #Edges: %D\n\n", filename, graph.nvtxs, graph.nedges/2);
mprintf("Checking Graph... ---------------------------------------------------\n");
if (CheckGraph(&graph))
mprintf(" The format of the graph is correct!\n");
else
mprintf(" The format of the graph is incorrect!\n");
mprintf("\n**********************************************************************\n");
gk_free((void **)&graph.xadj, &graph.adjncy, &graph.vwgt, &graph.adjwgt, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/mesh2dual.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* mesh2dual.c
*
* This file reads in the element node connectivity array of a mesh and writes
* out its dual graph in a format suitable for METIS.
*
* Started 9/29/97
* George
*
* $Id: mesh2dual.c,v 1.2 2002/08/10 06:02:53 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, j, ne, nn, etype, mtype, cnt, numflag=0;
idxtype *elmnts, *xadj, *adjncy, *metype;
idxtype *conmat=NULL, *elms, *weights;
double IOTmr, DUALTmr;
char fileout[256], etypestr[5][5] = {"TRI", "TET", "HEX", "QUAD", "LINE"};
if (argc <2) {
mprintf("Usage: %s <meshfile> [confile]\n",argv[0]);
exit(0);
}
mtype=MeshType(argv[1]);
ne=MixedElements(argv[1]);
metype = idxmalloc(ne, "main: metype");
weights = idxmalloc(ne, "main: weights");
if (mtype==1 || mtype==3){
gk_clearcputimer(IOTmr);
gk_clearcputimer(DUALTmr);
gk_startcputimer(IOTmr);
if (mtype==1)
elmnts = ReadMesh(argv[1], &ne, &nn, &etype);
else
elmnts = ReadMeshWgt(argv[1], &ne, &nn, &etype, weights);
gk_stopcputimer(IOTmr);
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Mesh Information ----------------------------------------------------\n");
mprintf(" Name: %s, #Elements: %D, #Nodes: %D, Etype: %s\n\n", argv[1], ne, nn, etypestr[etype-1]);
mprintf("Forming Dual Graph... -----------------------------------------------\n");
xadj = idxmalloc(ne+1, "main: xadj");
elms = idxsmalloc(ne+1, 0, "main: elms");
gk_startcputimer(DUALTmr);
cnt=METIS_MeshToDualCount(&ne, &nn, elmnts, elms, &etype, &numflag);
adjncy = idxmalloc(cnt+1, "main: adjncy");
METIS_MeshToDual(&ne, &nn, elmnts, elms, &etype, &numflag, xadj, adjncy);
gk_stopcputimer(DUALTmr);
mprintf(" Dual Information: #Vertices: %D, #Edges: %D\n", ne, xadj[ne]/2);
msprintf(fileout, "%s.dgraph", argv[1]);
gk_startcputimer(IOTmr);
if (mtype==1)
WriteGraph(fileout, ne, xadj, adjncy);
else
WriteWgtGraph(fileout, ne, xadj, adjncy, weights);
gk_stopcputimer(IOTmr);
mprintf("\nTiming Information --------------------------------------------------\n");
mprintf(" I/O: \t\t %7.3f\n", gk_getcputimer(IOTmr));
mprintf(" Dual Creation:\t\t %7.3f\n", gk_getcputimer(DUALTmr));
mprintf("**********************************************************************\n");
}
else {
gk_clearcputimer(IOTmr);
gk_clearcputimer(DUALTmr);
gk_startcputimer(IOTmr);
if(mtype==0)
elmnts = ReadMixedMesh(argv[1], &ne, &nn, metype);
else
elmnts = ReadMixedMeshWgt(argv[1], &ne, &nn, metype, weights);
if (argc==3)
conmat = ReadMgcnums(argv[2]);
gk_stopcputimer(IOTmr);
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Mesh Information ----------------------------------------------------\n");
mprintf(" Name: %s, #Elements: %D, #Nodes: %D, Etype: %s\n\n", argv[1], ne, nn, "Mixed");
mprintf("Forming Dual Graph... ----------------------------------------------\n");
xadj = idxmalloc(ne+1, "main: xadj");
elms = idxsmalloc(ne+1, 0, "main: elms");
gk_startcputimer(DUALTmr);
if (argc==3){
cnt=METIS_MixedMeshToDualCount(&ne, &nn, elmnts, elms, metype, &numflag,
conmat, 1);
adjncy = idxmalloc(cnt+1, "main: adjncy");
METIS_MixedMeshToDual(&ne, &nn, elmnts, elms, metype, &numflag, xadj, adjncy,
conmat, 1);
}
else{
cnt=METIS_MixedMeshToDualCount(&ne, &nn, elmnts, elms, metype, &numflag,
conmat, 0);
adjncy = idxmalloc(cnt+1, "main: adjncy");
METIS_MixedMeshToDual(&ne, &nn, elmnts, elms, metype, &numflag, xadj, adjncy, conmat, 0);
}
gk_stopcputimer(DUALTmr);
mprintf(" Dual Information: #Vertices: %D, #Edges: %D\n", ne, xadj[ne]/2);
msprintf(fileout, "%s.dgraph", argv[1]);
gk_startcputimer(IOTmr);
if (mtype==0)
WriteGraph(fileout, ne, xadj, adjncy);
else
WriteWgtGraph(fileout, ne, xadj, adjncy, weights);
gk_stopcputimer(IOTmr);
mprintf("\nTiming Information --------------------------------------------------\n");
mprintf(" I/O: \t\t %7.3f\n", gk_getcputimer(IOTmr));
mprintf(" Dual Creation:\t\t %7.3f\n", gk_getcputimer(DUALTmr));
mprintf("**********************************************************************\n");
}
gk_free((void **)&elmnts, &xadj, &adjncy, &metype, &weights, &elms, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/metis.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* metis.c
*
* This file contains the driving routine for the multilevel method
*
* Started 8/28/94
* George
*
* $Id: metis.c,v 1.2 2002/08/10 06:02:53 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, nparts, OpType, options[10], nbytes;
idxtype *part, *perm, *iperm, *sizes;
GraphType graph;
char filename[256];
idxtype numflag = 0, wgtflag = 0, edgecut;
if (argc != 11) {
mprintf("Usage: %s <GraphFile> <Nparts> <Mtype> <Rtype> <IPtype> <Oflags> <Pfactor> <Nseps> <OPtype> <Options> \n",argv[0]);
exit(0);
}
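/* Illustrative invocation (the numeric values are placeholders only):
     metis test.graph 8 1 1 1 0 0 1 1 0
   i.e. partition test.graph into 8 parts; the remaining integers select
   the matching, refinement, and initial-partitioning schemes, the
   ordering flags/parameters, the operation type, and the debug level,
   exactly in the order parsed below. */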
strcpy(filename, argv[1]);
nparts = atoi(argv[2]);
options[OPTION_CTYPE] = atoi(argv[3]);
options[OPTION_RTYPE] = atoi(argv[4]);
options[OPTION_ITYPE] = atoi(argv[5]);
options[OPTION_OFLAGS] = atoi(argv[6]);
options[OPTION_PFACTOR] = atoi(argv[7]);
options[OPTION_NSEPS] = atoi(argv[8]);
OpType = atoi(argv[9]);
options[OPTION_DBGLVL] = atoi(argv[10]);
ReadGraph(&graph, filename, &wgtflag);
if (graph.nvtxs <= 0) {
mprintf("Empty graph. Nothing to do.\n");
exit(0);
}
mprintf("Partitioning a graph with %D vertices and %D edges\n", graph.nvtxs, graph.nedges/2);
METIS_EstimateMemory(&graph.nvtxs, graph.xadj, graph.adjncy, &numflag, &OpType, &nbytes);
mprintf("Metis will need %D Mbytes\n", nbytes/(1024*1024));
part = perm = iperm = NULL;
options[0] = 1;
switch (OpType) {
case OP_PMETIS:
mprintf("Recursive Partitioning... ------------------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
METIS_PartGraphRecursive(&graph.nvtxs, graph.xadj, graph.adjncy, graph.vwgt, graph.adjwgt,
&wgtflag, &numflag, &nparts, options, &edgecut, part);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, part, graph.nvtxs, nparts));
mprintf(" %D-way Edge-Cut: %7D\n", nparts, edgecut);
ComputePartitionInfo(&graph, nparts, part);
gk_free((void **)&part, LTERM);
break;
case OP_KMETIS:
mprintf("K-way Partitioning... -----------------------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
METIS_PartGraphKway(&graph.nvtxs, graph.xadj, graph.adjncy, graph.vwgt, graph.adjwgt,
&wgtflag, &numflag, &nparts, options, &edgecut, part);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, part, graph.nvtxs, nparts));
mprintf(" %D-way Edge-Cut: %7D\n", nparts, edgecut);
ComputePartitionInfo(&graph, nparts, part);
gk_free((void **)&part, LTERM);
break;
case OP_OEMETIS:
gk_free((void **)&graph.vwgt, &graph.adjwgt, LTERM);
mprintf("Edge-based Nested Dissection Ordering... ----------------------------\n");
perm = idxmalloc(graph.nvtxs, "main: perm");
iperm = idxmalloc(graph.nvtxs, "main: iperm");
METIS_EdgeND(&graph.nvtxs, graph.xadj, graph.adjncy, &numflag, options, perm, iperm);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, iperm, graph.nvtxs, 0));
ComputeFillIn(&graph, iperm);
gk_free((void **)&perm, &iperm, LTERM);
break;
case OP_ONMETIS:
gk_free((void **)&graph.vwgt, &graph.adjwgt, LTERM);
mprintf("Node-based Nested Dissection Ordering... ----------------------------\n");
perm = idxmalloc(graph.nvtxs, "main: perm");
iperm = idxmalloc(graph.nvtxs, "main: iperm");
METIS_NodeND(&graph.nvtxs, graph.xadj, graph.adjncy, &numflag, options, perm, iperm);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, iperm, graph.nvtxs, 0));
ComputeFillIn(&graph, iperm);
gk_free((void **)&perm, &iperm, LTERM);
break;
case OP_ONWMETIS:
gk_free((void **)&graph.adjwgt, LTERM);
mprintf("WNode-based Nested Dissection Ordering... ---------------------------\n");
perm = idxmalloc(graph.nvtxs, "main: perm");
iperm = idxmalloc(graph.nvtxs, "main: iperm");
METIS_NodeWND(&graph.nvtxs, graph.xadj, graph.adjncy, graph.vwgt, &numflag, options, perm, iperm);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, iperm, graph.nvtxs, 0));
ComputeFillIn(&graph, iperm);
gk_free((void **)&perm, &iperm, LTERM);
break;
case 6:
gk_free((void **)&graph.vwgt, &graph.adjwgt, LTERM);
mprintf("Node-based Nested Dissection Ordering... ----------------------------\n");
perm = idxmalloc(graph.nvtxs, "main: perm");
iperm = idxmalloc(graph.nvtxs, "main: iperm");
sizes = idxmalloc(2*nparts, "main: sizes");
METIS_NodeNDP(graph.nvtxs, graph.xadj, graph.adjncy, nparts, options, perm, iperm, sizes);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, iperm, graph.nvtxs, 0));
ComputeFillIn(&graph, iperm);
for (i=0; i<2*nparts-1; i++)
mprintf("%D ", sizes[i]);
mprintf("\n");
gk_free((void **)&perm, &iperm, &sizes, LTERM);
break;
case 7:
mprintf("K-way Vol Partitioning... -------------------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
METIS_PartGraphVKway(&graph.nvtxs, graph.xadj, graph.adjncy, graph.vwgt, NULL,
&wgtflag, &numflag, &nparts, options, &edgecut, part);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, part, graph.nvtxs, nparts));
mprintf(" %D-way Volume: %7D\n", nparts, edgecut);
ComputePartitionInfo(&graph, nparts, part);
gk_free((void **)&part, LTERM);
break;
case 9:
mprintf("K-way Partitioning (with vwgts)... ----------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
graph.vwgt = idxmalloc(graph.nvtxs, "main: graph.vwgt");
for (i=0; i<graph.nvtxs; i++)
graph.vwgt[i] = graph.xadj[i+1]-graph.xadj[i]+1;
wgtflag = 2;
METIS_PartGraphKway(&graph.nvtxs, graph.xadj, graph.adjncy, graph.vwgt, graph.adjwgt,
&wgtflag, &numflag, &nparts, options, &edgecut, part);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, part, graph.nvtxs, nparts));
mprintf(" %D-way Edge-Cut: %7D\n", nparts, edgecut);
ComputePartitionInfo(&graph, nparts, part);
gk_free((void **)&part, LTERM);
break;
case 10:
break;
default:
errexit("Unknown operation type\n");
}
gk_free((void **)&graph.xadj, &graph.adjncy, &graph.vwgt, &graph.adjwgt, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/cmdline_cmetis.c<|end_filename|>
/*
* cmdline_cmetis.c
*
* This file parses the command line arguments
*
* Started 3/25/03
* George
*
* $Id: cmdline_cmetis.c,v 1.1 2003/04/04 23:22:47 karypis Exp $
*
*/
#include <metisbin.h>
/*-------------------------------------------------------------------
* Command-line options
*-------------------------------------------------------------------*/
static struct gk_option long_options[] = {
{"mtype", 1, 0, CMD_MTYPE},
{"itype", 1, 0, CMD_ITYPE},
{"rtype", 1, 0, CMD_RTYPE},
{"balanced", 0, 0, CMD_BALANCE},
{"niter", 1, 0, CMD_NITER},
{"tpwgts", 1, 0, CMD_TPWGTS},
{"seed", 1, 0, CMD_SEED},
{"dbglvl", 1, 0, CMD_DBGLVL},
{"help", 0, 0, CMD_HELP},
{0, 0, 0, 0}
};
/*-------------------------------------------------------------------
* Mappings for the various parameter values
*-------------------------------------------------------------------*/
static gk_StringMap_t mtype_options[] = {
{"rm", MTYPE_RM},
{"hem", MTYPE_HEM},
{"shem", MTYPE_SHEM},
{"shebm", MTYPE_SHEBM_ONENORM},
{"sbhem", MTYPE_SBHEM_ONENORM},
{NULL, 0}
};
static gk_StringMap_t itype_options[] = {
{"greedy", ITYPE_GGPKL},
{"random", ITYPE_RANDOM},
{NULL, 0}
};
static gk_StringMap_t rtype_options[] = {
{"fm", RTYPE_FM},
{NULL, 0}
};
/*-------------------------------------------------------------------
* Mini help
*-------------------------------------------------------------------*/
static char helpstr[][100] =
{
" ",
"Usage: cpmetis [options] <graph filename> <coord filename> <nparts>",
" ",
" Required parameters",
" filename Stores the graph to be partitioned.",
" nparts The number of partitions to split the graph.",
" ",
" Optional parameters",
" -mtyep=string",
" Specifies the scheme to be used to match the vertices of the graph",
" during the coarsening.",
" The possible values are:",
" rm - Random matching",
" hem - Heavy-edge matching",
" shem - Sorted heavy-edge matching [default]",
" shebm - Combination of shem and balanced matching for",
" multi-constraint.",
" sbhem - Similar as shebm but priority is given to balance",
" ",
" -itype=string",
" Specifies the scheme to be used to compute the initial partitioning",
" of the graph.",
" The possible values are:",
" greedy - Grow a bisection using a greedy strategy [default]",
" random - Compute a bisection at random",
" ",
" -rtype=string",
" Specifies the scheme to be used for refinement",
" The possible values are:",
" fm - FM refinement",
" ",
" -balance",
" Specifies that the final partitioning should contain nparts-1 equal",
" size partitions with the last partition having upto nparts-1 fewer",
" vertices.",
" ",
" -seed=int ",
" Selects the seed of the random number generator. ",
" ",
" -dbglvl=int ",
" Selects the dbglvl. ",
" ",
" -help",
" Prints this message.",
""
};
static char shorthelpstr[][100] = {
" ",
" Usage: cpmetis [options] <graph filename> <coord filename> <nparts>",
" use 'cpmetis -help' for a summary of the options.",
""
};
/*************************************************************************
* This is the entry point of the command-line argument parser
**************************************************************************/
void parse_cmdline(ParamType *params, int argc, char *argv[])
{
int i, j, k;
int c, option_index;
/* initialize the params data structure */
params->mtype = PMETIS_CTYPE;
params->itype = PMETIS_ITYPE;
params->rtype = PMETIS_RTYPE;
params->dbglvl = PMETIS_DBGLVL;
params->balance = 0;
params->seed = -1;
params->dbglvl = 0;
params->filename = NULL;
params->xyzfilename = NULL;
params->nparts = 1;
/* Parse the command line arguments */
while ((c = gk_getopt_long_only(argc, argv, "", long_options, &option_index)) != -1) {
switch (c) {
case CMD_MTYPE:
if (gk_optarg)
if ((params->mtype = gk_GetStringID(mtype_options, gk_optarg)) == -1)
errexit("Invalid option -%s=%s\n", long_options[option_index].name, gk_optarg);
break;
case CMD_ITYPE:
if (gk_optarg)
if ((params->itype = gk_GetStringID(itype_options, gk_optarg)) == -1)
errexit("Invalid option -%s=%s\n", long_options[option_index].name, gk_optarg);
break;
case CMD_RTYPE:
if (gk_optarg)
if ((params->rtype = gk_GetStringID(rtype_options, gk_optarg)) == -1)
errexit("Invalid option -%s=%s\n", long_options[option_index].name, gk_optarg);
break;
case CMD_BALANCE:
params->balance = 1;
break;
case CMD_SEED:
if (gk_optarg) params->seed = atoi(gk_optarg);
break;
case CMD_DBGLVL:
if (gk_optarg) params->dbglvl = atoi(gk_optarg);
break;
case CMD_HELP:
for (i=0; strlen(helpstr[i]) > 0; i++)
mprintf("%s\n", helpstr[i]);
exit(0);
break;
case '?':
default:
mprintf("Illegal command-line option(s)\nUse %s -help for a summary of the options.\n", argv[0]);
exit(0);
}
}
if (argc-gk_optind != 3) {
mprintf("Missing parameters.");
for (i=0; strlen(shorthelpstr[i]) > 0; i++)
mprintf("%s\n", shorthelpstr[i]);
exit(0);
}
params->filename = strdup(argv[gk_optind++]);
params->xyzfilename = strdup(argv[gk_optind++]);
params->nparts = atoi(argv[gk_optind++]);
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/mmetis.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* mmetis.c
*
* This file contains the driving routine for the multilevel method
*
* Started 8/28/94
* George
*
* $Id: mmetis.c,v 1.2 2002/08/10 06:02:53 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, j, nparts, OpType, options[10], nbytes;
idxtype *part, *perm, *iperm, *sizes;
GraphType graph;
char filename[256];
idxtype numflag = 0, wgtflag = 0, edgecut;
idxtype *mvwgt, *vwgt, *vsize;
float rubvec[MAXNCON];
if (argc < 8) {
mprintf("Usage: %s <GraphFile> <Nparts> <Mtype> <Rtype> <IPtype> <OpType> <Options> \n",argv[0]);
exit(0);
}
strcpy(filename, argv[1]);
nparts = atoi(argv[2]);
options[OPTION_CTYPE] = atoi(argv[3]);
options[OPTION_RTYPE] = atoi(argv[4]);
options[OPTION_ITYPE] = atoi(argv[5]);
OpType = atoi(argv[6]);
options[OPTION_DBGLVL] = atoi(argv[7]);
ReadGraph(&graph, filename, &wgtflag);
if (graph.nvtxs <= 0) {
mprintf("Empty graph. Nothing to do.\n");
exit(0);
}
mprintf("Partitioning a graph with %D vertices and %D edges. Constraints: %D\n", graph.nvtxs, graph.nedges/2, graph.ncon);
part = perm = iperm = NULL;
vsize = NULL;
options[0] = 1;
switch (OpType) {
case OP_PMETIS:
mprintf("Recursive Partitioning... ------------------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
METIS_mCPartGraphRecursive(&graph.nvtxs, &graph.ncon, graph.xadj, graph.adjncy,
graph.vwgt, graph.adjwgt, &wgtflag, &numflag, &nparts, options, &edgecut, part);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, part, graph.nvtxs, nparts));
mprintf(" %D-way Edge-Cut: %7D\n", nparts, edgecut);
ComputePartitionInfo(&graph, nparts, part);
gk_free((void **)&part, LTERM);
break;
case OP_KMETIS:
mprintf("K-way Partitioning... ----------------------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
if (argc != 8+graph.ncon)
errexit("You must supply %d ub constraints!\n", graph.ncon);
for (i=0; i<graph.ncon; i++)
rubvec[i] = atof(argv[8+i]);
METIS_mCPartGraphKway(&graph.nvtxs, &graph.ncon, graph.xadj, graph.adjncy, graph.vwgt,
graph.adjwgt, &wgtflag, &numflag, &nparts, rubvec, options, &edgecut, part);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, part, graph.nvtxs, nparts));
mprintf(" %D-way Edge-Cut: %7D \tUBVec: ", nparts, edgecut);
for (i=0; i<graph.ncon; i++)
mprintf("%.3f ", rubvec[i]);
mprintf("\n");
ComputePartitionInfo(&graph, nparts, part);
if (options[OPTION_DBGLVL]&1024) {
/* Partition each objective separately and see the results */
vwgt = idxmalloc(graph.nvtxs, "vwgt");
for (j=0; j<graph.ncon; j++) {
for (i=0; i<graph.nvtxs; i++)
vwgt[i] = graph.vwgt[i*graph.ncon+j];
options[0] = 0;
METIS_PartGraphKway(&graph.nvtxs, graph.xadj, graph.adjncy, vwgt, graph.adjwgt,
&wgtflag, &numflag, &nparts, options, &edgecut, part);
mprintf("Partitioning using constrain %D ------------------------------------\n", j);
ComputePartitionInfo(&graph, nparts, part);
}
gk_free((void **)&vwgt, LTERM);
}
gk_free((void **)&part, LTERM);
break;
case 3:
mprintf("Recursive Partitioning... -----------------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
if (argc != 8+graph.ncon)
errexit("You must supply %d ub constraints!\n", graph.ncon);
for (i=0; i<graph.ncon; i++)
rubvec[i] = atof(argv[8+i]);
METIS_mCHPartGraphRecursive(&graph.nvtxs, &graph.ncon, graph.xadj, graph.adjncy,
graph.vwgt, graph.adjwgt, &wgtflag, &numflag, &nparts, rubvec, options, &edgecut, part);
IFSET(options[OPTION_DBGLVL], DBG_OUTPUT, WritePartition(filename, part, graph.nvtxs, nparts));
mprintf(" %D-way Edge-Cut: %7D \tUBVec: ", nparts, edgecut);
for (i=0; i<graph.ncon; i++)
mprintf("%.3f ", rubvec[i]);
mprintf("\n");
ComputePartitionInfo(&graph, nparts, part);
gk_free((void **)&part, LTERM);
break;
default:
errexit("Unknown");
}
gk_free((void **)&graph.xadj, &graph.adjncy, &graph.vwgt, &graph.adjwgt, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/partnmesh.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* partnmesh.c
*
* This file reads in the element node connectivity array of a mesh and
* partitions both the elements and the nodes using the nodal graph.
*
* Started 9/29/97
* George
*
* $Id: partnmesh.c,v 1.2 2002/08/10 06:02:54 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, j, ne, nn, etype, mtype, numflag=0, nparts, edgecut;
idxtype *elmnts, *epart, *npart, *metype, *weights;
double IOTmr, DUALTmr;
char etypestr[5][5] = {"TRI", "TET", "HEX", "QUAD", "LINE"};
GraphType graph;
if (argc != 3) {
mprintf("Usage: %s <meshfile> <nparts>\n",argv[0]);
exit(0);
}
nparts = atoi(argv[2]);
if (nparts < 2) {
mprintf("nparts must be greater than one.\n");
exit(0);
}
gk_clearcputimer(IOTmr);
gk_clearcputimer(DUALTmr);
mtype=MeshType(argv[1]);
ne=MixedElements(argv[1]);
metype = idxmalloc(ne, "main: metype");
weights = idxmalloc(ne, "main: weights");
gk_startcputimer(IOTmr);
if(mtype==1)
elmnts = ReadMesh(argv[1], &ne, &nn, &etype);
else if(mtype==3)
elmnts = ReadMeshWgt(argv[1], &ne, &nn, &etype, weights);
else if(mtype==0)
elmnts = ReadMixedMesh(argv[1], &ne, &nn, metype);
else
elmnts = ReadMixedMeshWgt(argv[1], &ne, &nn, metype, weights);
gk_stopcputimer(IOTmr);
epart = idxmalloc(ne, "main: epart");
npart = idxmalloc(nn, "main: npart");
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Mesh Information ----------------------------------------------------\n");
if (mtype==1)
mprintf(" Name: %s, #Elements: %D, #Nodes: %D, Etype: %s\n\n", argv[1], ne, nn, etypestr[etype-1]);
else
mprintf(" Name: %s, #Elements: %D, #Nodes: %D, Etype: %s\n\n", argv[1], ne, nn, "Mixed");
mprintf("Partitioning Nodal Graph... -----------------------------------------\n");
gk_startcputimer(DUALTmr);
if (mtype==1 || mtype==3)
METIS_PartMeshNodal(&ne, &nn, elmnts, &etype, &numflag, &nparts, &edgecut, epart, npart);
else
METIS_PartMixedMeshNodal(&ne, &nn, elmnts, metype, &numflag, &nparts, &edgecut, epart, npart);
gk_stopcputimer(DUALTmr);
mprintf(" %D-way Edge-Cut: %7D, Balance: %5.2f\n", nparts, edgecut, ComputeElementBalance(ne, nparts, epart));
gk_startcputimer(IOTmr);
WriteMeshPartition(argv[1], nparts, ne, epart, nn, npart);
gk_stopcputimer(IOTmr);
mprintf("\nTiming Information --------------------------------------------------\n");
mprintf(" I/O: \t\t %7.3f\n", gk_getcputimer(IOTmr));
mprintf(" Partitioning: \t\t %7.3f\n", gk_getcputimer(DUALTmr));
mprintf("**********************************************************************\n");
/*
graph.nvtxs = ne;
graph.xadj = idxmalloc(ne+1, "xadj");
graph.vwgt = idxsmalloc(ne, 1, "vwgt");
graph.adjncy = idxmalloc(10*ne, "adjncy");
graph.adjwgt = idxsmalloc(10*ne, 1, "adjncy");
METIS_MeshToDual(&ne, &nn, elmnts, &etype, &numflag, graph.xadj, graph.adjncy);
ComputePartitionInfo(&graph, nparts, epart);
gk_free((void **)&graph.xadj, &graph.adjncy, &graph.vwgt, &graph.adjwgt, LTERM);
*/
gk_free((void **)&elmnts, &epart, &npart, &metype, &weights, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/cepic.c<|end_filename|>
/*
* Copyright 2003, Regents of the University of Minnesota
*
* cepic.c
*
* This file contains the driving routine for contact/impact simulations
* for EPIC meshes
*
* Started 4/12/03
* George
*
* $Id: cepic.c,v 1.15 2003/05/03 16:10:48 karypis Exp $
*
*/
#include <metisbin.h>
#define Flip_int32(type) (((type >>24) & 0x000000ff) | \
((type >> 8) & 0x0000ff00) | \
((type << 8) & 0x00ff0000) | \
((type <<24) & 0xff000000) )
#define Flip_int64(type) (((type >>56) & 0x00000000000000ff) | \
((type >>40) & 0x000000000000ff00) | \
((type >>24) & 0x0000000000ff0000) | \
((type >>8) & 0x00000000ff000000) | \
((type <<8) & 0x000000ff00000000) | \
((type <<24) & 0x0000ff0000000000) | \
((type <<40) & 0x00ff000000000000) | \
((type <<56) & 0xff00000000000000))
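/* Example: Flip_int32(0x11223344) yields 0x44332211, i.e. these macros
   swap byte order (endianness); the mesh files read below are presumably
   stored in the opposite byte order from the host, hence the flipping
   after every fread. */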
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, j, k, istep, options[10], nn, ne, fstep, lstep, nparts, nboxes, u[3], dim, nsplit, flags=0, NSKIP=1;
char filename[256];
idxtype *mien, *mrng, *part, *sflag;
double *mxyz, *bxyz;
idxtype *xadj, *adjncy, *cntptr, *cntind;
idxtype numflag = 0, wgtflag = 0, edgecut, etype=2;
void *cinfo=NULL;
FILE *fpin;
long long int *ltmp;
if (argc < 6) {
mfprintf(stderr, "Usage: %s <nn> <ne> <fstep> <lstep> <nparts> [flags] [NSKIP]\n", argv[0]);
exit(0);
}
nn = atoi(argv[1]);
ne = atoi(argv[2]);
fstep = atoi(argv[3]);
lstep = atoi(argv[4]);
nparts = atoi(argv[5]);
if (argc > 6)
flags = atoi(argv[6]);
if (argc > 7)
NSKIP = atoi(argv[7]);
mprintf("\n\n------------------------------------------------------------------------------------------\n");
mprintf("Reading nn: %D, ne: %D, fstep: %D, lstep: %D, nparts: %D\n", nn, ne, fstep, lstep, nparts);
mien = idxmalloc(4*ne, "main: mien");
mxyz = gk_dmalloc(3*nn, "main: mxyz");
mrng = idxmalloc(4*ne, "main: mrng");
bxyz = gk_dmalloc(6*ne*4, "main: bxyz");
part = idxmalloc(nn, "main: part");
sflag = idxmalloc(nn, "main: sflag");
xadj = idxmalloc(nn+1, "main: xadj");
adjncy = idxmalloc(50*nn, "main: adjncy");
/*========================================================================
* Read the initial mesh and setup the graph and contact information
*========================================================================*/
msprintf(filename, "mien.%04D", fstep);
fpin = GKfopen(filename, "rb", "main: mien");
fread(mien, sizeof(int), 4*ne, fpin);
for (i=0; i<4*ne; i++)
mien[i] = Flip_int32(mien[i]);
GKfclose(fpin);
/*========================================================================
* Create the nodal graph
*========================================================================*/
numflag = mien[idxargmin(4*ne, mien)];
METIS_MeshToNodal(&ne, &nn, mien, &etype, &numflag, xadj, adjncy);
/*========================================================================
* Get into the loop in which you go over the different configurations
*========================================================================*/
for (k=0, istep=fstep; istep<=lstep; istep++, k++) {
msprintf(filename, "mxyz.%04D", istep);
mprintf("Reading %s...............................................................\n", filename);
fpin = GKfopen(filename, "rb", "main: mxyz");
fread(mxyz, sizeof(double), 3*nn, fpin);
for (i=0; i<3*nn; i++) {
ltmp = (long long int *)(mxyz+i);
*ltmp = Flip_int64(*ltmp);
}
GKfclose(fpin);
msprintf(filename, "mrng.%04D", istep);
fpin = GKfopen(filename, "rb", "main: mrng");
fread(mrng, sizeof(int), 4*ne, fpin);
for (i=0; i<4*ne; i++)
mrng[i] = Flip_int32(mrng[i]);
GKfclose(fpin);
/* Determine which nodes are in the surface */
iset(nn, 0, sflag);
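/* each entry of mrng flags one of the tetrahedron's four faces as a
   boundary face; the three nodes of every flagged face are marked as
   surface (contact) nodes. mien holds 1-based node ids, hence the -1. */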
for (i=0; i<ne; i++) {
if (mrng[4*i+0] > 0) { /* 1, 2, 3 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
}
if (mrng[4*i+1] > 0) { /* 1, 2, 4 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
if (mrng[4*i+2] > 0) { /* 2, 3, 4 */
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
if (mrng[4*i+3] > 0) { /* 1, 3, 4 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
}
mprintf("Contact Nodes: %D of %D\n", isum(nn, sflag), nn);
/* Compute/Update the partitioning */
if (k%NSKIP == 0) {
if (cinfo != NULL)
METIS_FreeContactInfo(cinfo);
options[0] = 0;
cinfo = METIS_PartGraphForContact(&nn, xadj, adjncy, mxyz, sflag, &numflag, &nparts,
options, &edgecut, part);
for (i=0; i<nn; i++)
part[i]--;
}
switch (flags) {
case 1:
if (cinfo != NULL)
METIS_FreeContactInfo(cinfo);
cinfo = METIS_SetupContact(&nn, mxyz, sflag, &nparts, part);
break;
case 2:
if (cinfo != NULL)
METIS_FreeContactInfo(cinfo);
cinfo = METIS_SetupContact0(&nn, mxyz, sflag, &nparts, part);
break;
default:
METIS_UpdateContactInfo(cinfo, &nn, mxyz, sflag);
}
/* Determine the bounding boxes of the surface elements */
for (nsplit=0, nboxes=0, i=0; i<ne; i++) {
if (mrng[4*i+0] > 0) { /* 1, 2, 3 */
u[0] = mien[4*i+0]-1;
u[1] = mien[4*i+1]-1;
u[2] = mien[4*i+2]-1;
bxyz[6*nboxes+0] = bxyz[6*nboxes+3] = mxyz[3*u[0]+0];
bxyz[6*nboxes+1] = bxyz[6*nboxes+4] = mxyz[3*u[0]+1];
bxyz[6*nboxes+2] = bxyz[6*nboxes+5] = mxyz[3*u[0]+2];
for (j=1; j<3; j++) {
for (dim=0; dim<3; dim++) {
bxyz[6*nboxes+dim] = (bxyz[6*nboxes+dim] > mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+dim]);
bxyz[6*nboxes+3+dim] = (bxyz[6*nboxes+3+dim] < mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+3+dim]);
}
}
nboxes++;
if (part[u[0]] != part[u[1]] || part[u[0]] != part[u[2]])
nsplit++;
}
if (mrng[4*i+1] > 0) { /* 1, 2, 4 */
u[0] = mien[4*i+0]-1;
u[1] = mien[4*i+1]-1;
u[2] = mien[4*i+3]-1;
bxyz[6*nboxes+0] = bxyz[6*nboxes+3] = mxyz[3*u[0]+0];
bxyz[6*nboxes+1] = bxyz[6*nboxes+4] = mxyz[3*u[0]+1];
bxyz[6*nboxes+2] = bxyz[6*nboxes+5] = mxyz[3*u[0]+2];
for (j=1; j<3; j++) {
for (dim=0; dim<3; dim++) {
bxyz[6*nboxes+dim] = (bxyz[6*nboxes+dim] > mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+dim]);
bxyz[6*nboxes+3+dim] = (bxyz[6*nboxes+3+dim] < mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+3+dim]);
}
}
nboxes++;
if (part[u[0]] != part[u[1]] || part[u[0]] != part[u[2]])
nsplit++;
}
if (mrng[4*i+2] > 0) { /* 2, 3, 4 */
u[0] = mien[4*i+1]-1;
u[1] = mien[4*i+2]-1;
u[2] = mien[4*i+3]-1;
bxyz[6*nboxes+0] = bxyz[6*nboxes+3] = mxyz[3*u[0]+0];
bxyz[6*nboxes+1] = bxyz[6*nboxes+4] = mxyz[3*u[0]+1];
bxyz[6*nboxes+2] = bxyz[6*nboxes+5] = mxyz[3*u[0]+2];
for (j=1; j<3; j++) {
for (dim=0; dim<3; dim++) {
bxyz[6*nboxes+dim] = (bxyz[6*nboxes+dim] > mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+dim]);
bxyz[6*nboxes+3+dim] = (bxyz[6*nboxes+3+dim] < mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+3+dim]);
}
}
nboxes++;
if (part[u[0]] != part[u[1]] || part[u[0]] != part[u[2]])
nsplit++;
}
if (mrng[4*i+3] > 0) { /* 1, 3, 4 */
u[0] = mien[4*i+0]-1;
u[1] = mien[4*i+2]-1;
u[2] = mien[4*i+3]-1;
bxyz[6*nboxes+0] = bxyz[6*nboxes+3] = mxyz[3*u[0]+0];
bxyz[6*nboxes+1] = bxyz[6*nboxes+4] = mxyz[3*u[0]+1];
bxyz[6*nboxes+2] = bxyz[6*nboxes+5] = mxyz[3*u[0]+2];
for (j=1; j<3; j++) {
for (dim=0; dim<3; dim++) {
bxyz[6*nboxes+dim] = (bxyz[6*nboxes+dim] > mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+dim]);
bxyz[6*nboxes+3+dim] = (bxyz[6*nboxes+3+dim] < mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+3+dim]);
}
}
nboxes++;
if (part[u[0]] != part[u[1]] || part[u[0]] != part[u[2]])
nsplit++;
}
}
METIS_FindContacts(cinfo, &nboxes, bxyz, &nparts, &cntptr, &cntind);
mprintf("Contacting Elements: %D Indices: %D Nsplit: %D\n", nboxes, cntptr[nboxes]-nboxes, nsplit);
gk_free((void **)&cntptr, &cntind, LTERM);
}
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/metisbin.h<|end_filename|>
/*
* metisbin.h
*
* This file contains the various header inclusions
*
* Started 8/9/02
* George
*/
#include <GKlib.h>
#include <stddef.h>
#include <stdlib.h>
#include <stdarg.h>
#include <stdio.h>
#include <ctype.h>
#include <math.h>
#include <time.h>
#include <string.h>
#include <limits.h>
#include <signal.h>
#include <setjmp.h>
#include <assert.h>
#if defined(ENABLE_OPENMP)
#include <omp.h>
#endif
#include <metis.h>
#include "../libmetis/defs.h"
#include "../libmetis/struct.h"
#include "../libmetis/rename.h"
#include "../libmetis/macros.h"
#include "../libmetis/proto.h"
#include "defs.h"
#include "struct.h"
#include "proto.h"
#if defined(COMPILER_MSC)
#define rint(x) ((idxtype)((x)+0.5)) /* MSC does not have rint() function */
#endif
#if defined(COMPILER_GCC)
extern char* strdup (const char *);
#endif
<|start_filename|>RecastDemo/Contrib/metis/source/programs/kfmetis.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* kfmetis.c
*
* This file contains the driving routine for partitioning for
* sub-domain direct factorization.
*
* Started 8/11/02
* George
*
* $Id: kfmetis.c,v 1.2 2002/08/13 16:40:14 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, options[10];
idxtype *part;
float lbvec[MAXNCON];
GraphType graph;
idxtype numflag = 0, wgtflag = 0, edgecut;
ParamType params;
double TOTALTmr, METISTmr, IOTmr;
parse_cmdline(&params, argc, argv);
if (params.nparts < 2) {
mprintf("The number of partitions should be greater than 1!\n");
exit(0);
}
gk_clearcputimer(TOTALTmr);
gk_clearcputimer(METISTmr);
gk_clearcputimer(IOTmr);
gk_startcputimer(TOTALTmr);
gk_startcputimer(IOTmr);
ReadGraph(&graph, params.filename, &wgtflag);
if (graph.nvtxs <= 0) {
mprintf("Empty graph. Nothing to do.\n");
exit(0);
}
gk_stopcputimer(IOTmr);
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Graph Information ---------------------------------------------------\n");
mprintf(" Name: %s, #Vertices: %D, #Edges: %D, #Parts: %D\n", params.filename, graph.nvtxs, graph.nedges/2, params.nparts);
if (graph.ncon > 1)
mprintf(" Balancing Constraints: %D\n", graph.ncon);
mprintf("\nRecursive Partitioning... -------------------------------------------\n");
part = idxmalloc(graph.nvtxs, "main: part");
options[0] = 0;
gk_startcputimer(METISTmr);
METIS_PartFillGraph(&graph.nvtxs, graph.xadj, graph.adjncy, graph.vwgt, graph.adjwgt,
&wgtflag, &numflag, &(params.nparts), options, &edgecut, part);
gk_stopcputimer(METISTmr);
ComputePartitionBalance(&graph, params.nparts, part, lbvec);
mprintf(" %D-way Edge-Cut: %7D, Balance: ", params.nparts, edgecut);
for (i=0; i<graph.ncon; i++)
mprintf("%5.2f ", lbvec[i]);
mprintf("\n");
gk_startcputimer(IOTmr);
// WritePartition(params.filename, part, graph.nvtxs, params.nparts);
gk_stopcputimer(IOTmr);
gk_stopcputimer(TOTALTmr);
mprintf("\nTiming Information --------------------------------------------------\n");
mprintf(" I/O: \t\t %7.3f\n", gk_getcputimer(IOTmr));
mprintf(" Partitioning: \t\t %7.3f (PMETIS time)\n", gk_getcputimer(METISTmr));
mprintf(" Total: \t\t %7.3f\n", gk_getcputimer(TOTALTmr));
mprintf("**********************************************************************\n");
gk_free((void **)&graph.xadj, &graph.adjncy, &graph.vwgt, &graph.adjwgt, &part, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/GKlib/trunk/test/gksort.c<|end_filename|>
/*!
\file gksort.c
\brief Testing module for the various sorting routines in GKlib
\date Started 4/4/2007
\author George
\version\verbatim $Id: gksort.c 1421 2007-04-06 14:37:41Z karypis $ \endverbatim
*/
#include <GKlib.h>
#define N 10000
/*************************************************************************/
/*! Testing module for gk_?isort() routine */
/*************************************************************************/
void test_isort()
{
gk_idx_t i;
int array[N];
/* test the increasing sort */
printf("Testing iisort...\n");
for (i=0; i<N; i++)
array[i] = RandomInRange(123432);
gk_iisort(N, array);
for (i=0; i<N-1; i++) {
if (array[i] > array[i+1])
printf("gk_iisort error at index %jd [%d %d]\n", (intmax_t)i, array[i], array[i+1]);
}
/* test the decreasing sort */
printf("Testing disort...\n");
for (i=0; i<N; i++)
array[i] = RandomInRange(123432);
gk_disort(N, array);
for (i=0; i<N-1; i++) {
if (array[i] < array[i+1])
printf("gk_disort error at index %jd [%d %d]\n", (intmax_t)i, array[i], array[i+1]);
}
}
/*************************************************************************/
/*! Testing module for gk_?fsort() routine */
/*************************************************************************/
void test_fsort()
{
gk_idx_t i;
float array[N];
/* test the increasing sort */
printf("Testing ifsort...\n");
for (i=0; i<N; i++)
array[i] = RandomInRange(123432)/(1.0+RandomInRange(645323));
gk_ifsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i] > array[i+1])
printf("gk_ifsort error at index %jd [%f %f]\n", (intmax_t)i, array[i], array[i+1]);
}
/* test the decreasing sort */
printf("Testing dfsort...\n");
for (i=0; i<N; i++)
array[i] = RandomInRange(123432)/(1.0+RandomInRange(645323));
gk_dfsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i] < array[i+1])
printf("gk_dfsort error at index %jd [%f %f]\n", (intmax_t)i, array[i], array[i+1]);
}
}
/*************************************************************************/
/*! Testing module for gk_?idxsort() routine */
/*************************************************************************/
void test_idxsort()
{
gk_idx_t i;
gk_idx_t array[N];
/* test the increasing sort */
printf("Testing iidxsort...\n");
for (i=0; i<N; i++)
array[i] = RandomInRange(123432);
gk_iidxsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i] > array[i+1])
printf("gk_iidxsort error at index %jd [%jd %jd]\n", (intmax_t)i, array[i], array[i+1]);
}
/* test the decreasing sort */
printf("Testing didxsort...\n");
for (i=0; i<N; i++)
array[i] = RandomInRange(123432);
gk_didxsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i] < array[i+1])
printf("gk_didxsort error at index %jd [%jd %jd]\n", (intmax_t)i, array[i], array[i+1]);
}
}
/*************************************************************************/
/*! Testing module for gk_?ikvsort() routine */
/*************************************************************************/
void test_ikvsort()
{
gk_idx_t i;
gk_ikv_t array[N];
/* test the increasing sort */
printf("Testing iikvsort...\n");
for (i=0; i<N; i++) {
array[i].key = RandomInRange(123432);
array[i].val = i;
}
gk_iikvsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i].key > array[i+1].key)
printf("gk_iikvsort error at index %jd [%d %d] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
/* test the decreasing sort */
printf("Testing dikvsort...\n");
for (i=0; i<N; i++) {
array[i].key = RandomInRange(123432);
array[i].val = i;
}
gk_dikvsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i].key < array[i+1].key)
printf("gk_dikvsort error at index %jd [%d %d] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
}
/*************************************************************************/
/*! Testing module for gk_?fkvsort() routine */
/*************************************************************************/
void test_fkvsort()
{
gk_idx_t i;
gk_fkv_t array[N];
/* test the increasing sort */
printf("Testing ifkvsort...\n");
for (i=0; i<N; i++) {
array[i].key = RandomInRange(123432)/(1.0+RandomInRange(645323));
array[i].val = i;
}
gk_ifkvsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i].key > array[i+1].key)
printf("gk_ifkvsort error at index %jd [%f %f] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
/* test the decreasing sort */
printf("Testing dfkvsort...\n");
for (i=0; i<N; i++) {
array[i].key = RandomInRange(123432)/(1.0+RandomInRange(645323));
array[i].val = i;
}
gk_dfkvsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i].key < array[i+1].key)
printf("gk_dfkvsort error at index %jd [%f %f] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
}
/*************************************************************************/
/*! Testing module for gk_?dkvsort() routine */
/*************************************************************************/
void test_dkvsort()
{
gk_idx_t i;
gk_dkv_t array[N];
/* test the increasing sort */
printf("Testing idkvsort...\n");
for (i=0; i<N; i++) {
array[i].key = RandomInRange(123432)/(1.0+RandomInRange(645323));
array[i].val = i;
}
gk_idkvsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i].key > array[i+1].key)
printf("gk_idkvsort error at index %jd [%lf %lf] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
/* test the decreasing sort */
printf("Testing ddkvsort...\n");
for (i=0; i<N; i++) {
array[i].key = RandomInRange(123432)/(1.0+RandomInRange(645323));
array[i].val = i;
}
gk_ddkvsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i].key < array[i+1].key)
printf("gk_ddkvsort error at index %jd [%lf %lf] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
}
/*************************************************************************/
/*! Testing module for gk_?skvsort() routine */
/*************************************************************************/
void test_skvsort()
{
gk_idx_t i;
gk_skv_t array[N];
char line[256];
/* test the increasing sort */
printf("Testing iskvsort...\n");
for (i=0; i<N; i++) {
sprintf(line, "%d", RandomInRange(123432));
array[i].key = gk_strdup(line);
array[i].val = i;
}
gk_iskvsort(N, array);
for (i=0; i<N-1; i++) {
if (strcmp(array[i].key, array[i+1].key) > 0)
printf("gk_idkvsort error at index %jd [%s %s] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
/* test the decreasing sort */
printf("Testing dskvsort...\n");
for (i=0; i<N; i++) {
sprintf(line, "%d", RandomInRange(123432));
array[i].key = gk_strdup(line);
array[i].val = i;
}
gk_dskvsort(N, array);
for (i=0; i<N-1; i++) {
/*printf("%s\n", array[i].key);*/
if (strcmp(array[i].key, array[i+1].key) < 0)
printf("gk_ddkvsort error at index %jd [%s %s] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
}
/*************************************************************************/
/*! Testing module for gk_?idxkvsort() routine */
/*************************************************************************/
void test_idxkvsort()
{
gk_idx_t i;
gk_idxkv_t array[N];
/* test the increasing sort */
printf("Testing iidxkvsort...\n");
for (i=0; i<N; i++) {
array[i].key = RandomInRange(123432);
array[i].val = i;
}
gk_iidxkvsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i].key > array[i+1].key)
printf("gk_iidxkvsort error at index %jd [%jd %jd] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
/* test the decreasing sort */
printf("Testing didxkvsort...\n");
for (i=0; i<N; i++) {
array[i].key = RandomInRange(123432);
array[i].val = i;
}
gk_didxkvsort(N, array);
for (i=0; i<N-1; i++) {
if (array[i].key < array[i+1].key)
printf("gk_didxkvsort error at index %jd [%jd %jd] [%jd %jd]\n", (intmax_t)i, array[i].key, array[i+1].key, (intmax_t)array[i].val, (intmax_t)array[i+1].val);
}
}
int main()
{
test_isort();
test_fsort();
test_idxsort();
test_ikvsort();
test_fkvsort();
test_dkvsort();
test_skvsort();
test_idxkvsort();
}
<|start_filename|>HierarchicalPathfinding/Source/Navigation.cpp<|end_filename|>
#include "Navigation.h"
void Navigation::init()
{
//free any memory from a previous build (every node owns its own
//edge/intra-edge arrays, so they are released node by node)
for(int i = 0; i < numGraphs; i++)
{
for(int j = 0; j < graphs[i].numNodes; j++)
{
graphs[i].nodes[j].DestroyIntraEdge();
graphs[i].nodes[j].DestroyEdge();
}
graphs[i].Destroy();
}
numGraphs = 0;
//reserve memory for the graphs
graphs = (Graph*)dtAlloc(sizeof(Graph)*levels, DT_ALLOC_PERM);
memset(graphs, 0, sizeof(Graph)*levels);
numLevel = 0;
//reserve memory for the node pool
m_nodePool = 0;
m_openList = 0;
if (!m_nodePool || m_nodePool->getMaxNodes() < maxNodes)
{
if (m_nodePool)
{
m_nodePool->~dtNodePool();
dtFree(m_nodePool);
m_nodePool = 0;
}
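//placement-new into dtAlloc'd storage; matched by the explicit
//destructor call plus dtFree in the branch above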
m_nodePool = new (dtAlloc(sizeof(dtNodePool), DT_ALLOC_PERM)) dtNodePool(maxNodes, dtNextPow2(maxNodes/4));
}
else
{
m_nodePool->clear();
}
//reserve memory for open list
if (!m_openList || m_openList->getCapacity() < maxNodes)
{
if (m_openList)
{
m_openList->~dtNodeQueue();
dtFree(m_openList);
m_openList = 0;
}
m_openList = new (dtAlloc(sizeof(dtNodeQueue), DT_ALLOC_PERM)) dtNodeQueue(maxNodes);
}
else
{
m_openList->clear();
}
//get the base poly ref
refBase = m_navMesh->getPolyRefBase(tile);
}
void Navigation::createHierarchicalGraph(int p_levels,int p_level,int p_mergedPolys, rcContext* ctx,const dtMeshTile* ptile, const dtNavMesh* pm_navMesh, const dtNavMeshQuery* pm_navQuery, std::map<dtPolyRef, int> &nodesInCluster)
{
rcAssert(ctx);
if(maxPolyInNode == p_mergedPolys && levels == p_levels)
{
level = p_level;
setGraph();
}
else
{
ctx->startTimer(RC_TIMER_BUILD_HIERARCHICAL_GRAPH);
levels = p_levels;
level = p_level;
maxPolyInNode = p_mergedPolys;
tile = ptile;
m_navMesh = pm_navMesh;
m_navQuery = pm_navQuery;
init();
const dtPoly* poly = 0;
const dtPoly* neighbourPoly = 0;
dtPolyRef ref;
dtPolyRef neighbourRef;
int numEdgesNode = 0;
//Main graph
parentGraph.Init(tile->header->polyCount);
//create nodes
for (int i = 0; i < tile->header->polyCount; ++i)
{
parentGraph.AddNode(i);
}
//reserve memory for edges
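//(each polygon's link list is walked once just to count its neighbours,
//so the per-node edge array can be sized exactly)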
for(int i = 0; i < parentGraph.numNodes; i++)
{
ref = parentGraph.nodes[i].idNode + refBase;
m_navMesh->getTileAndPolyByRefUnsafe(ref, &tile, &poly);
for (unsigned int j = poly->firstLink; j != DT_NULL_LINK; j = tile->links[j].next)
{
numEdgesNode++;
}
if(numEdgesNode > 0)
{
parentGraph.InitEdge(ref - refBase, numEdgesNode);
numEdgesNode = 0;
}
}
//create edges
int idPos = 1;
std::map<std::pair<dtPolyRef, dtPolyRef>, float *> positions;
for(int i = 0; i < parentGraph.numNodes; i++)
{
//current node
ref = parentGraph.nodes[i].idNode + refBase;
m_navMesh->getTileAndPolyByRefUnsafe(ref, &tile, &poly);
for (unsigned int j = poly->firstLink; j != DT_NULL_LINK; j = tile->links[j].next)
{
neighbourRef = tile->links[j].ref;
m_navMesh->getTileAndPolyByRefUnsafe(neighbourRef, &tile, &neighbourPoly);
//get position
float mid[3];
m_navQuery->getEdgeMidPoint(ref, poly, tile, neighbourRef, neighbourPoly, tile, mid);
std::pair<dtPolyRef,dtPolyRef> p1;
std::pair<dtPolyRef,dtPolyRef> p2;
p1 = std::make_pair(ref,neighbourRef);
auto it1 = positions.find(p1);
if(it1 == positions.end())
{
p2 = std::make_pair(neighbourRef,ref);
//add edge
parentGraph.AddEdge(ref - refBase,neighbourRef - refBase, mid, idPos,ref - refBase);
parentGraph.AddEdge(neighbourRef- refBase,ref - refBase, mid, idPos,neighbourRef- refBase);
//note: only the keys are consulted below; the stored pointers are never dereferenced
positions.insert(std::make_pair(p1, mid));
positions.insert(std::make_pair(p2, mid));
idPos++;
}
}
}
numTotalEdges = idPos;
positions.clear();
//hierarchical subdivisions
mainGraph = parentGraph;
graphs[numGraphs++] = parentGraph;
buildHierarchy();
setGraph();
ctx->stopTimer(RC_TIMER_BUILD_HIERARCHICAL_GRAPH);
for(int i = 0; i < numGraphs; i++)
{
ctx->log(RC_LOG_PROGRESS, ">>Level %d = %d nodes.", i, graphs[i].numNodes);
}
std::ofstream fout("results.txt", std::ios_base::app | std::ios_base::out);
fout << "\n\nMerged Polys: " << p_mergedPolys << "\n";
for(int i = 0; i < numGraphs; i++)
{
fout << "level " << i << ": " << graphs[i].numNodes << "\n";
}
fout.close();
}
//get painted nodes
if(level !=0)
{
for(int i = 0; i < graphs[0].numNodes; i++)
{
dtPolyRef nodeId = graphs[0].nodes[i].idNode;
Graph::Node* node = getNode(nodeId,0);
if(node == NULL)
nodesInCluster.insert(std::make_pair(nodeId + refBase, -1));
else
nodesInCluster.insert(std::make_pair(nodeId + refBase, node->idNode));
}
}
}
dtStatus Navigation::findPathNav(rcContext* ctx, dtPolyRef startRef, dtPolyRef endRef,const float* startPos, const float* endPos, const dtQueryFilter* filter, dtPolyRef* path, int &pathCount, const int maxPath)
{
rcAssert(ctx);
std::ofstream fout("results.txt", std::ios_base::app | std::ios_base::out);
fout << "\n" << maxPolyInNode << " " << level << " " << currentGraph.numNodes << " ";
dtPolyRef tempPathNodes[128];
int tempPathPolys[128];
int nTempPath = 0;
ctx->startTimer(RC_TIMER_GET_PARENT_NODES);
Graph::Node *sNode = getNode(startRef-refBase,0);
Graph::Node *eNode = getNode(endRef-refBase,0);
if(sNode == NULL || eNode==NULL)
return DT_FAILURE;
fout << sNode->numEdges << " " << eNode->numEdges << " ";
int nSNodeIntraedges = sNode->numIntraEdges;
int nENodeIntraedges = eNode->numIntraEdges;
ctx->stopTimer(RC_TIMER_GET_PARENT_NODES);
fout << ctx->getAccumulatedTime(RC_TIMER_GET_PARENT_NODES)/1000.0f << " ";
if((level == 0) || (sNode->idNode == eNode->idNode))
{
ctx->startTimer(RC_TIMER_FIND_HIERACHICAL_PATH);
m_navQuery->findPath(startRef, endRef, startPos, endPos, filter, path, &pathCount, MAX_POLYS);
ctx->stopTimer(RC_TIMER_FIND_HIERACHICAL_PATH);
fout << ctx->getAccumulatedTime(RC_TIMER_FIND_HIERACHICAL_PATH)/1000.0f << " ";
return DT_SUCCESS;
}
//new start and end position id
int startIdPos = numTotalEdges+1;
int endIdPos = startIdPos+1;
//add node start to graph
ctx->startTimer(RC_TIMER_LINK_START_END_NODES);
linkStartToGraph(sNode,startRef,startPos,startIdPos);
linkEndToGraph(eNode,endRef,endPos, endIdPos);
ctx->stopTimer(RC_TIMER_LINK_START_END_NODES);
fout << ctx->getAccumulatedTime(RC_TIMER_LINK_START_END_NODES)/1000.0f << " ";
//find path
ctx->startTimer(RC_TIMER_FIND_HIERACHICAL_PATH);
startRef = sNode->idNode;
endRef = eNode->idNode;
findHierarchicalPath(startRef, endRef, startIdPos, endIdPos, startPos, endPos, tempPathNodes, tempPathPolys, &nTempPath, maxPath);
ctx->stopTimer(RC_TIMER_FIND_HIERACHICAL_PATH);
fout << ctx->getAccumulatedTime(RC_TIMER_FIND_HIERACHICAL_PATH)/1000.0f << " ";
int posId;
int n = 0;
//start
ctx->startTimer(RC_TIMER_GET_SUBPATH);
if(nTempPath > 1)
{
posId = tempPathNodes[1];
for(unsigned int i = nSNodeIntraedges; i < sNode->numIntraEdges; i++)
{
if((sNode->intraEdges[i].startPosId == startIdPos) && (sNode->intraEdges[i].endPosId == posId))
{
memcpy(path, sNode->intraEdges[i].path, sizeof(dtPolyRef)*sNode->intraEdges[i].nPath);
n = sNode->intraEdges[i].nPath;
break;
}
}
}
//middle
for(int i = 1; i < nTempPath-1; i++)
{
Graph::Node *node = &currentGraph.nodes[tempPathPolys[i]];
getPath(tempPathNodes[i], tempPathNodes[i+1], node, level, path, n);
}
//end
posId = tempPathNodes[nTempPath-1];
for(unsigned int i = nENodeIntraedges; i < eNode->numIntraEdges; i++)
{
if((eNode->intraEdges[i].startPosId == posId) && (eNode->intraEdges[i].endPosId == endIdPos))
{
memcpy(path + n, eNode->intraEdges[i].path, sizeof(dtPolyRef)*eNode->intraEdges[i].nPath);
n += eNode->intraEdges[i].nPath;
break;
}
}
ctx->stopTimer(RC_TIMER_GET_SUBPATH);
fout << ctx->getAccumulatedTime(RC_TIMER_GET_SUBPATH)/1000.0f << " ";
pathCount = n;
//delete start node and links
ctx->startTimer(RC_TIMER_DELETE_START_END_NODES);
sNode->numIntraEdges = nSNodeIntraedges;
eNode->numIntraEdges = nENodeIntraedges;
ctx->stopTimer(RC_TIMER_DELETE_START_END_NODES);
fout << ctx->getAccumulatedTime(RC_TIMER_DELETE_START_END_NODES)/1000.0f;
fout.close();
return DT_SUCCESS;
}
void Navigation::getPath(int fromPosId, int toPosId, Graph::Node *node, int l, dtPolyRef* tempPath, int &nTempPath)
{
if(l==0)
return;
for(unsigned int i = 0; i < node->numIntraEdges; i++)
{
if((node->intraEdges[i].startPosId == fromPosId) && (node->intraEdges[i].endPosId == toPosId))
{
Graph::IntraEdge intraEdge =node->intraEdges[i];
l--;
if(l==0)
{
memcpy(tempPath + nTempPath, intraEdge.path, sizeof(dtPolyRef)*intraEdge.nPath);
nTempPath += intraEdge.nPath;
return;
}
else
{
dtPolyRef nextRef;
int startIdPos;
int endIdPos;
startIdPos = fromPosId;
//for each subNode
int n = intraEdge.nPath;
for(int j = 0; j < n; j++ )
{
Graph::Node * subNode = &graphs[l].nodes[intraEdge.path[j]];
nextRef = intraEdge.path[(j+1)%n];
if(j==n-1)
{
endIdPos = toPosId;
}
else
{
unsigned int numSubEdges = subNode->numEdges;
for(unsigned int k = 0; k < numSubEdges; k++ )
{
if(subNode->edges[k].targetNodeId == nextRef)
{
endIdPos = subNode->edges[k].idPos;
break;
}
}
}
getPath(startIdPos, endIdPos, subNode, l,tempPath, nTempPath);
startIdPos = endIdPos;
}
}
break;
}
}
}
dtStatus Navigation::findHierarchicalPath(dtPolyRef startRef, dtPolyRef endRef, int startIdPos, int endIdPos, const float* startPos, const float* endPos, dtPolyRef* tempPathNodes, int* tempPathPolys, int *nTempPath, const int maxPath)
{
dtAssert(m_nodePool);
dtAssert(m_openList);
m_nodePool->clear();
m_openList->clear();
dtNode* startNode = m_nodePool->getNode(startIdPos);
dtVcopy(startNode->pos, startPos);
startNode->pidx = 0;
startNode->cost = 0;
startNode->total = dtVdist(startPos, endPos) * H_SCALE;
startNode->id = startIdPos;
startNode->idPos = startRef;
startNode->flags = DT_NODE_OPEN;
m_openList->push(startNode);
dtNode* lastBestNode = startNode;
float lastBestNodeCost = startNode->total;
dtStatus status = DT_SUCCESS;
while (!m_openList->empty())
{
// Remove node from open list and put it in closed list.
dtNode* bestNode = m_openList->pop();
bestNode->flags &= ~DT_NODE_OPEN;
bestNode->flags |= DT_NODE_CLOSED;
// Reached the goal, stop searching.
if (bestNode->id == endIdPos)
{
lastBestNode = bestNode;
break;
}
const Graph::Node* node = &currentGraph.nodes[bestNode->idPos];
// Get parent
dtPolyRef parentRef = -1;
if (bestNode->pidx)
parentRef = m_nodePool->getNodeAtIdx(bestNode->pidx)->id;
for(unsigned int i=0; i < node->numEdges; i++)
{
dtPolyRef neighbourRef = node->edges[i].idPos;
if(bestNode->id ==neighbourRef)
continue;
if (neighbourRef == parentRef)
continue;
dtNode* neighbourNode = m_nodePool->getNode(neighbourRef);
if(!neighbourNode)
continue;
// If the node is visited the first time, calculate node position.
if (neighbourNode->flags == 0)
{
dtVcopy(neighbourNode->pos, node->edges[i].pos);
}
// Calculate cost and heuristic.
float cost = 0.0f;
float heuristic = 0.0f;
// Special case for last node.
if (neighbourRef == endIdPos)
{
const float curCost = getCost(node, bestNode->id, neighbourNode->id, bestNode->pos, neighbourNode->pos);
//const Graph::Node* eNode = currentGraph.nodes[endRef];
//const float endCost = getCost(eNode, neighbourNode->id, endIdPos, neighbourNode->pos, endPos);
const float endCost = getCost(node, neighbourNode->id, endIdPos, neighbourNode->pos, endPos);
cost = bestNode->cost + curCost + endCost;
heuristic = 0.0f;
}
else
{
const float curCost = getCost(node, bestNode->id, neighbourNode->id,bestNode->pos, neighbourNode->pos);
cost = bestNode->cost + curCost;
heuristic = dtVdist(neighbourNode->pos, endPos)*H_SCALE;
}
const float total = cost + heuristic;
// The node is already in open list and the new result is worse, skip.
if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
continue;
// The node is already visited and process, and the new result is worse, skip.
if ((neighbourNode->flags & DT_NODE_CLOSED) && total >= neighbourNode->total)
continue;
// Add or update the node.
neighbourNode->pidx = m_nodePool->getNodeIdx(bestNode);
neighbourNode->idPos = node->edges[i].targetNodeId;
neighbourNode->id = neighbourRef;
neighbourNode->flags = (neighbourNode->flags & ~DT_NODE_CLOSED);
neighbourNode->cost = cost;
neighbourNode->total = total;
if (neighbourNode->flags & DT_NODE_OPEN)
{
// Already in open, update node location.
m_openList->modify(neighbourNode);
}
else
{
// Put the node in open list.
neighbourNode->flags |= DT_NODE_OPEN;
m_openList->push(neighbourNode);
}
// Update nearest node to target so far.
if (heuristic < lastBestNodeCost)
{
lastBestNodeCost = heuristic;
lastBestNode = neighbourNode;
}
}
}
// Reverse the path.
dtNode* prev = 0;
dtNode* node = lastBestNode;
do
{
dtNode* next = m_nodePool->getNodeAtIdx(node->pidx);
node->pidx = m_nodePool->getNodeIdx(prev);
prev = node;
node = next;
}
while (node);
// Store path
node = prev;
int n = 0;
do
{
tempPathNodes[n] = node->id;
tempPathPolys[n] = node->idPos;
n++;
if (n >= maxPath)
{
break;
}
node = m_nodePool->getNodeAtIdx(node->pidx);
}
while (node);
*nTempPath = n;
return status;
}
void Navigation::linkStartToGraph(Graph::Node *node, dtPolyRef startRef, const float *startPos, int startIdPos)
{
for(unsigned int i=0; i < node->numEdges; i++)
{
dtPolyRef m_polys[MAX_POLYS];
int m_npolys;
float cost = findPath(startRef - refBase, node->edges[i].idPoly, startPos, node->edges[i].pos,m_polys,m_npolys,MAX_POLYS);
currentGraph.AddIntraEdge(node->idNode, startIdPos, node->edges[i].idPos, cost, m_polys,m_npolys);
}
}
void Navigation::linkEndToGraph(Graph::Node *node, dtPolyRef startRef, const float *startPos, int startIdPos)
{
for(unsigned int i=0; i < node->numEdges; i++)
{
dtPolyRef m_polys[MAX_POLYS];
int m_npolys;
float cost = findPath(node->edges[i].idPoly, startRef - refBase, node->edges[i].pos, startPos,m_polys,m_npolys,MAX_POLYS);
currentGraph.AddIntraEdge(node->idNode, node->edges[i].idPos, startIdPos, cost, m_polys,m_npolys);
}
}
float Navigation::getCost(const Graph::Node *node, int startPosId, int endPosId, const float * startPos, const float * endPos)
{
if(startPosId == endPosId )
return 0.0f;
/*for(unsigned int i = 0; i < node->numIntraEdges; i++)
{
if((node->intraEdges[i].startPosId == startPosId) && (node->intraEdges[i].endPosId == endPosId))
{
return node->intraEdges[i].cost;
}
}*/
return dtVdist(startPos, endPos);
}
void Navigation::buildHierarchy()
{
do{
//merge nodes
//numParts = floor((float(parentGraph.numNodes) / float(maxPolyInNode)) +0.5f);
numParts = parentGraph.numNodes / maxPolyInNode;
if(numParts < 2 || numParts >= parentGraph.numNodes)
break;
//merge nodes and build cluster
mergeNodes();
//create subgraph
buildNodes();
buildEdges();
graphs[numGraphs++] = currentGraph;
parentGraph.nodes = currentGraph.nodes;
parentGraph.numNodes = currentGraph.numNodes;
nodeCluster.clear();
clusterNode.clear();
numLevel++;
}while(numLevel < levels);
}
void Navigation::mergeNodes()
{
int maxLinkCount = 0;
//number edges
for(int j = 0; j < parentGraph.numNodes; j++)
{
Graph::Node * node = &parentGraph.nodes[j];
for(unsigned int i=0; i < node->numEdges; i++)
{
maxLinkCount++;
}
}
idxtype* xadj = (idxtype*)dtAlloc( (parentGraph.numNodes+1)*sizeof(idxtype), DT_ALLOC_TEMP );
idxtype* adjncy = (idxtype*)dtAlloc( maxLinkCount*2*sizeof(idxtype), DT_ALLOC_TEMP );
idxtype* part = (idxtype*)dtAlloc( parentGraph.numNodes * sizeof(idxtype), DT_ALLOC_TEMP );
//get parameters
int xID = 0;
int adjID = 0;
unsigned int ip;
for(int j = 0; j < parentGraph.numNodes; j++)
{
xadj[xID++] = adjID;
Graph::Node * node = &parentGraph.nodes[j];
for(unsigned int i=0; i < node->numEdges; i++)
{
ip = node->edges[i].targetNodeId;
adjncy[adjID++] = ip;
}
}
xadj[xID] = adjID;
idxtype wgtflag = 0;
idxtype numflag = 0;
int options[5] = {0,0,0,0,0};
idxtype edgecut = -1;
//Metis
METIS_PartGraphKway(&parentGraph.numNodes, xadj, adjncy, NULL, NULL, &wgtflag, &numflag, &numParts, options, &edgecut, part);
//get partitions (check that merged nodes are neighbours)
checkPartition(part, parentGraph.numNodes, numParts);
//set the partition number for each node
int partCount = 0;
for(int j = 0; j < parentGraph.numNodes; ++j)
{
int idC = part[partCount++];
Graph::Node *node = &parentGraph.nodes[j];
if(!node)
continue;
if(node->numEdges <= 0)
continue;
clusterNode.insert(std::make_pair((dtPolyRef)idC, (dtPolyRef)j));
}
//add parent for each child
int nodeId = 0;
for(auto it = clusterNode.begin(), end = clusterNode.end(); it != end; it = clusterNode.upper_bound(it->first))
{
auto ret = clusterNode.equal_range(it->first);
for (auto it1=ret.first; it1!=ret.second; ++it1)
{
nodeCluster.insert(std::make_pair(it1->second, (dtPolyRef)nodeId));
parentGraph.AddParent(it1->second, nodeId);
}
nodeId++;
}
//create memory for nodes in graph
clusterNode.clear();
currentGraph.Init(nodeId);
for(auto it = nodeCluster.begin(); it != nodeCluster.end(); ++it )
{
clusterNode.insert(std::make_pair(it->second, it->first));
}
dtFree(xadj);
dtFree(adjncy);
dtFree(part);
}
void Navigation::buildNodes()
{
//nodes
for(auto it = clusterNode.begin(), end = clusterNode.end(); it != end; it = clusterNode.upper_bound(it->first))
{
currentGraph.AddNode(it->first);
}
}
void Navigation::buildEdges()
{
std::multimap<std::pair<dtPolyRef,dtPolyRef>, std::pair<int ,float*>> portals;
std::vector<dtPolyRef> subgraphNodes;
dtPolyRef clusterId = 0;
dtPolyRef neighbourClusterId = 0;
// create memory for edges
int numEdgesNode = 0;
for(auto it = clusterNode.begin(), end = clusterNode.end(); it != end; it = clusterNode.upper_bound(it->first))
{
clusterId = it->first;
auto ret = clusterNode.equal_range(clusterId);
for (auto it1=ret.first; it1!=ret.second; ++it1)
{
Graph::Node * node = &parentGraph.nodes[it1->second];
for(unsigned int i=0; i < node->numEdges; i++)
{
Graph::Node *neighbour = &parentGraph.nodes[node->edges[i].targetNodeId];
neighbourClusterId = nodeCluster.find(neighbour->idNode)->second;
if(neighbourClusterId == clusterId)
continue;
Graph::Node *tempNode = &graphs[numLevel].nodes[node->idNode];
for(unsigned int j=0; j < tempNode->numEdges; j++)
{
if(tempNode->edges[j].idPos == node->edges[i].idPos)
{
numEdgesNode++;
}
}
}
}
if(numEdgesNode > 0)
{
currentGraph.InitEdge(clusterId, numEdgesNode);
numEdgesNode = 0;
}
}
//create edges
for(auto it = clusterNode.begin(), end = clusterNode.end(); it != end; it = clusterNode.upper_bound(it->first))
{
clusterId = it->first;
//group of nodes
auto ret = clusterNode.equal_range(clusterId);
//for each node in the cluster
for (auto it1=ret.first; it1!=ret.second; ++it1)
{
Graph::Node * node = &parentGraph.nodes[it1->second];
//detect portal nodes between two clusters
for(unsigned int i=0; i < node->numEdges; i++)
{
Graph::Node *neighbour = &parentGraph.nodes[node->edges[i].targetNodeId];
neighbourClusterId = nodeCluster.find(neighbour->idNode)->second;
//add the subgraph nodes of the cluster
if(std::find(subgraphNodes.begin(), subgraphNodes.end(), node->edges[i].idPos)==subgraphNodes.end())
{
subgraphNodes.push_back(node->edges[i].idPos);
}
if(neighbourClusterId == clusterId)
continue;
//add edge
Graph::Node *tempNode = &graphs[numLevel].nodes[node->idNode];
for(unsigned int j=0; j < tempNode->numEdges; j++)
{
if(tempNode->edges[j].idPos == node->edges[i].idPos)
{
currentGraph.AddEdge(clusterId, neighbourClusterId, node->edges[i].pos, node->edges[i].idPos, tempNode->edges[j].idPoly);
break;
}
}
//portals
std::pair<dtPolyRef,dtPolyRef> p = std::make_pair(node->idNode, neighbour->idNode);
std::pair<int,float*> p1 = std::make_pair(node->edges[i].idPos, node->edges[i].pos);
portals.insert(std::make_pair(p, p1));
}
}
Graph::Node *currentNode = &currentGraph.nodes[clusterId];
currentNode->InitIntraEdge();
//add intraedges
for(auto it1 = portals.begin(); it1 != portals.end(); ++it1 )
{
std::pair<dtPolyRef,dtPolyRef> p = it1->first;
dtPolyRef idNode = p.first;
std::pair<int,float *> p1 = it1->second;
float nodePos[3];
int idPos = p1.first;
dtVcopy(nodePos, p1.second);
//intraedges
for(auto it2 = portals.begin(); it2 != portals.end(); ++it2 )
{
if(it1==it2)
continue;
std::pair<dtPolyRef,dtPolyRef> p2 = it2->first;
dtPolyRef idNeighbour = p2.first;
float neighbourPos[3];
std::pair<int,float*> p3 = it2->second;
int neighbourIdPos = p3.first;
dtVcopy(neighbourPos, p3.second);
dtPolyRef m_polys[MAX_POLYS];
int m_npolys;
float cost = findPathLocal(idNode, idNeighbour, idPos, neighbourIdPos, nodePos, neighbourPos, m_polys, m_npolys, MAX_POLYS, subgraphNodes);
currentGraph.AddIntraEdge(clusterId, idPos, neighbourIdPos, cost, m_polys, m_npolys);
}
}
subgraphNodes.clear();
portals.clear();
}
}
float Navigation::findPath(dtPolyRef startRef, dtPolyRef endRef,const float* startPos, const float* endPos, dtPolyRef* path, int &pathCount, const int maxPath)
{
float cost = 0.0f;
dtAssert(m_nodePool);
dtAssert(m_openList);
pathCount = 0;
m_nodePool->clear();
m_openList->clear();
dtNode* startNode = m_nodePool->getNode(startRef);
dtVcopy(startNode->pos, startPos);
startNode->pidx = 0;
startNode->cost = 0;
startNode->total = dtVdist(startPos, endPos) * H_SCALE;
startNode->id = startRef;
startNode->flags = DT_NODE_OPEN;
m_openList->push(startNode);
dtNode* lastBestNode = startNode;
float lastBestNodeCost = startNode->total;
while (!m_openList->empty())
{
// Remove node from open list and put it in closed list.
dtNode* bestNode = m_openList->pop();
bestNode->flags &= ~DT_NODE_OPEN;
bestNode->flags |= DT_NODE_CLOSED;
// Reached the goal, stop searching.
if (bestNode->id == endRef)
{
lastBestNode = bestNode;
break;
}
Graph::Node* node = &mainGraph.nodes[bestNode->id];
// Get parent poly and tile.
dtPolyRef parentRef = 0;
if (bestNode->pidx)
parentRef = m_nodePool->getNodeAtIdx(bestNode->pidx)->id;
for (unsigned int i=0; i < node->numEdges; i++)
{
dtPolyRef neighbourRef = node->edges[i].targetNodeId;
// Skip invalid ids and do not expand back to where we came from.
if (!neighbourRef || neighbourRef == parentRef)
continue;
dtNode* neighbourNode = m_nodePool->getNode(neighbourRef);
if (!neighbourNode)
{
continue;
}
// If the node is visited the first time, calculate node position.
if (neighbourNode->flags == 0)
{
dtVcopy(neighbourNode->pos, node->edges[i].pos);
}
// Calculate cost and heuristic.
float cost = 0.0f;
float heuristic = 0.0f;
if (neighbourRef == endRef)
{
const float curCost = dtVdist(bestNode->pos, neighbourNode->pos);
const float endCost = dtVdist(bestNode->pos, endPos);
cost = bestNode->cost + curCost + endCost;
heuristic = 0.0f;
}
else
{
const float curCost = dtVdist(bestNode->pos, neighbourNode->pos);
cost = bestNode->cost + curCost;
heuristic = dtVdist(neighbourNode->pos, endPos)*H_SCALE;
}
const float total = cost + heuristic;
// The node is already in open list and the new result is worse, skip.
if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
continue;
// The node is already visited and process, and the new result is worse, skip.
if ((neighbourNode->flags & DT_NODE_CLOSED) && total >= neighbourNode->total)
continue;
// Add or update the node.
neighbourNode->pidx = m_nodePool->getNodeIdx(bestNode);
neighbourNode->id = neighbourRef;
neighbourNode->flags = (neighbourNode->flags & ~DT_NODE_CLOSED);
neighbourNode->cost = cost;
neighbourNode->total = total;
if (neighbourNode->flags & DT_NODE_OPEN)
{
// Already in open, update node location.
m_openList->modify(neighbourNode);
}
else
{
// Put the node in open list.
neighbourNode->flags |= DT_NODE_OPEN;
m_openList->push(neighbourNode);
}
// Update nearest node to target so far.
if (heuristic < lastBestNodeCost)
{
lastBestNodeCost = heuristic;
lastBestNode = neighbourNode;
}
}
}
// Reverse the path.
dtNode* prev = 0;
dtNode* node = lastBestNode;
cost = lastBestNode->cost;
do
{
dtNode* next = m_nodePool->getNodeAtIdx(node->pidx);
node->pidx = m_nodePool->getNodeIdx(prev);
prev = node;
node = next;
}
while (node);
// Store path
node = prev;
int n = 0;
do
{
path[n++] = node->id + refBase;
if (n >= maxPath)
{
break;
}
node = m_nodePool->getNodeAtIdx(node->pidx);
}
while (node);
pathCount = n;
return cost;
}
float Navigation::findPathLocal(dtPolyRef startRef, dtPolyRef endRef,int startIdPos, int endIdPos, const float* startPos, const float* endPos, dtPolyRef* path, int &pathCount, const int maxPath, std::vector<dtPolyRef> subGraphNodes)
{
dtAssert(m_nodePool);
dtAssert(m_openList);
float cost = 0.0f;
pathCount = 0;
if (!maxPath)
return cost;
m_nodePool->clear();
m_openList->clear();
//get start and goal node
startRef = parentGraph.nodes[startRef].idNode;
endRef = parentGraph.nodes[endRef].idNode;
if (startRef == endRef)
{
if(numLevel == 0)
path[0] = startRef + refBase;
else
path[0] = startRef;
pathCount = 1;
cost = getCost(&parentGraph.nodes[startRef], startIdPos,endIdPos, startPos, endPos);
return cost;
}
dtNode* startNode = m_nodePool->getNode(startIdPos);
dtVcopy(startNode->pos, startPos);
startNode->pidx = 0;
startNode->cost = 0;
startNode->total = dtVdist(startPos, endPos) * H_SCALE;
startNode->id = startIdPos;
startNode->idPos = startRef;
startNode->flags = DT_NODE_OPEN;
m_openList->push(startNode);
dtNode* lastBestNode = startNode;
float lastBestNodeCost = startNode->total;
while (!m_openList->empty())
{
// Remove node from open list and put it in closed list.
dtNode* bestNode = m_openList->pop();
bestNode->flags &= ~DT_NODE_OPEN;
bestNode->flags |= DT_NODE_CLOSED;
// Reached the goal, stop searching.
if (bestNode->id == endIdPos)
{
lastBestNode = bestNode;
break;
}
Graph::Node* node = &parentGraph.nodes[bestNode->idPos];
// Get parent poly and tile.
dtPolyRef parentRef = 0;
if (bestNode->pidx)
parentRef = m_nodePool->getNodeAtIdx(bestNode->pidx)->id;
for(unsigned int i=0; i < node->numEdges; i++)
{
dtPolyRef neighbourRef = node->edges[i].idPos;
if(!subGraphNodes.empty())
{
if(std::find(subGraphNodes.begin(), subGraphNodes.end(), neighbourRef)==subGraphNodes.end())
continue;
}
if (neighbourRef == parentRef)
continue;
dtNode* neighbourNode = m_nodePool->getNode(neighbourRef);
if (!neighbourNode)
continue;
if (bestNode->id == neighbourNode->id)
continue;
// If the node is visited the first time, calculate node position.
if (neighbourNode->flags == 0)
{
dtVcopy(neighbourNode->pos, node->edges[i].pos);
}
// Calculate cost and heuristic.
float heuristic = 0.0f;
// Special case for last node.
if (neighbourRef == endIdPos)
{
const float curCost = getCost(node, bestNode->id, neighbourNode->id, bestNode->pos, neighbourNode->pos);
Graph::Node* eNode = &parentGraph.nodes[endRef];
const float endCost = getCost(eNode, neighbourNode->id, endIdPos, neighbourNode->pos, endPos);
cost = bestNode->cost + curCost + endCost;
heuristic = 0.0f;
}
else
{
//const float curCost = dtVdist(bestNode->pos, neighbourNode->pos);
const float curCost = getCost(node, bestNode->id, neighbourNode->id,bestNode->pos, neighbourNode->pos);
cost = bestNode->cost + curCost;
heuristic = dtVdist(neighbourNode->pos, endPos)*H_SCALE;
}
const float total = cost + heuristic;
// The node is already in open list and the new result is worse, skip.
if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
continue;
// The node is already visited and process, and the new result is worse, skip.
if ((neighbourNode->flags & DT_NODE_CLOSED) && total >= neighbourNode->total)
continue;
// Add or update the node.
neighbourNode->pidx = m_nodePool->getNodeIdx(bestNode);
neighbourNode->idPos = node->edges[i].targetNodeId;
neighbourNode->id = neighbourRef;
neighbourNode->flags = (neighbourNode->flags & ~DT_NODE_CLOSED);
neighbourNode->cost = cost;
neighbourNode->total = total;
if (neighbourNode->flags & DT_NODE_OPEN)
{
// Already in open, update node location.
m_openList->modify(neighbourNode);
}
else
{
// Put the node in open list.
neighbourNode->flags |= DT_NODE_OPEN;
m_openList->push(neighbourNode);
}
// Update nearest node to target so far.
if (heuristic < lastBestNodeCost)
{
lastBestNodeCost = heuristic;
lastBestNode = neighbourNode;
}
}
}
// Reverse the path.
dtNode* prev = 0;
dtNode* node = lastBestNode;
cost = lastBestNode->cost;
do
{
dtNode* next = m_nodePool->getNodeAtIdx(node->pidx);
node->pidx = m_nodePool->getNodeIdx(prev);
prev = node;
node = next;
}
while (node);
// Store path
node = prev;
int n = 0;
do
{
if(numLevel == 0)
path[n++] = node->idPos + refBase;
else
path[n++] = node->idPos;
node = m_nodePool->getNodeAtIdx(node->pidx);
}
while (node);
pathCount = n-1;
return cost;
}
void Navigation::setGraph()
{
if( level < numGraphs)
{
currentGraph = graphs[level];
}
else
{
currentGraph = graphs[0];
}
}
Graph::Node *Navigation::getNode(dtPolyRef ref, int l)
{
if(l == level)
return &graphs[l].nodes[ref];
Graph::Node *node = &graphs[l].nodes[ref];
if(node->edges == NULL) //a node without edges was never assigned a parent
return NULL;
l++;
return getNode(node->idParent, l); //the return was missing, falling off a non-void function
}
void Navigation::checkPartition(int* part, const int numNodes, const int numParts)
{
int* newPart = (int*) dtAlloc( sizeof(int)*numNodes, DT_ALLOC_TEMP );
bool* used = (bool*) dtAlloc( sizeof(bool)*numParts, DT_ALLOC_TEMP );
int nextPart = numParts;
memset(used, 0, sizeof(bool)*numParts );
for(int i=0; i < numNodes; i++)
newPart[i] = -1;
for(int i=0; i < numNodes; i++)
{
if( newPart[i] != -1 )
continue;
if( !used[ part[i] ] )
{
newPart[i] = part[i];
used[ part[i] ] = true;
}
else
{
newPart[i] = nextPart++;
}
explorePartition(i, newPart, part);
}
memcpy(part, newPart, sizeof(int)*numNodes );
dtFree(newPart );
dtFree( used );
}
void Navigation::explorePartition(int idNode, int* newPart, int* part)
{
Graph::Node *node = &parentGraph.nodes[idNode];
for(unsigned int i=0; i < node->numEdges; i++)
{
dtPolyRef neighbourRef = node->edges[i].targetNodeId;
Graph::Node *neighbour = &parentGraph.nodes[neighbourRef];
int neighbourID = neighbour->idNode;
if( neighbourID == -1 )
continue;
if( part[idNode] != part[ neighbourID ] )
continue;
if( newPart[neighbourID] != -1 )
continue;
newPart[neighbourID] = newPart[idNode];
explorePartition(neighbourID, newPart, part);
}
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/onmetis.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* onmetis.c
*
* This file contains the driving routine for the multilevel ordering method
*
* Started 8/28/94
* George
*
* $Id: onmetis.c,v 1.2 2002/08/10 06:02:53 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, options[10];
idxtype *perm, *iperm;
GraphType graph;
char filename[256];
idxtype numflag = 0, wgtflag;
double TOTALTmr, METISTmr, IOTmr, SMBTmr;
if (argc != 2) {
mprintf("Usage: %s <GraphFile>\n",argv[0]);
exit(0);
}
strcpy(filename, argv[1]);
gk_clearcputimer(TOTALTmr);
gk_clearcputimer(METISTmr);
gk_clearcputimer(IOTmr);
gk_clearcputimer(SMBTmr);
gk_startcputimer(TOTALTmr);
gk_startcputimer(IOTmr);
ReadGraph(&graph, filename, &wgtflag);
if (graph.nvtxs <= 0) {
mprintf("Empty graph. Nothing to do.\n");
exit(0);
}
if (graph.ncon != 1) {
mprintf("Ordering can only be applied to graphs with one constraint.\n");
exit(0);
}
gk_stopcputimer(IOTmr);
/* Ordering does not use weights! */
gk_free((void **)&graph.vwgt, &graph.adjwgt, LTERM);
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Graph Information ---------------------------------------------------\n");
mprintf(" Name: %s, #Vertices: %D, #Edges: %D\n\n", filename, graph.nvtxs, graph.nedges/2);
mprintf("Node-Based Ordering... ----------------------------------------------\n");
perm = idxmalloc(graph.nvtxs, "main: perm");
iperm = idxmalloc(graph.nvtxs, "main: iperm");
options[0] = 0;
gk_startcputimer(METISTmr);
METIS_NodeND(&graph.nvtxs, graph.xadj, graph.adjncy, &numflag, options, perm, iperm);
gk_stopcputimer(METISTmr);
gk_startcputimer(IOTmr);
WritePermutation(filename, iperm, graph.nvtxs);
gk_stopcputimer(IOTmr);
gk_startcputimer(SMBTmr);
ComputeFillIn(&graph, iperm);
gk_stopcputimer(SMBTmr);
gk_stopcputimer(TOTALTmr);
mprintf("\nTiming Information --------------------------------------------------\n");
mprintf(" I/O: \t %7.3f\n", gk_getcputimer(IOTmr));
mprintf(" Ordering: \t %7.3f (ONMETIS time)\n", gk_getcputimer(METISTmr));
mprintf(" Symbolic Factorization: \t %7.3f\n", gk_getcputimer(SMBTmr));
mprintf(" Total: \t %7.3f\n", gk_getcputimer(TOTALTmr));
mprintf("**********************************************************************\n");
gk_free((void **)&graph.xadj, &graph.adjncy, &perm, &iperm, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/Makefile<|end_filename|>
default:
more Makefile.help
all:
$(MAKE) -C GKlib/trunk
$(MAKE) -C libmetis
$(MAKE) -C programs
$(MAKE) -C test
clean:
$(MAKE) -C GKlib/trunk clean
$(MAKE) -C libmetis clean
$(MAKE) -C programs clean
$(MAKE) -C test clean
realclean:
$(MAKE) -C GKlib/trunk realclean
$(MAKE) -C libmetis realclean
$(MAKE) -C programs realclean
$(MAKE) -C test realclean
dist:
$(MAKE) -C dist dist
<|start_filename|>RecastDemo/Contrib/metis/source/programs/cepic-rcb.c<|end_filename|>
/*
* Copyright 2003, Regents of the University of Minnesota
*
* cepic.c
*
* This file contains the driving routine for contact/impact simulations
* for EPIC meshes
*
* Started 4/12/03
* George
*
* $Id: cepic-rcb.c,v 1.3 2003/05/03 16:10:48 karypis Exp $
*
*/
#include <metisbin.h>
#define Flip_int32(type) (((type >>24) & 0x000000ff) | \
((type >> 8) & 0x0000ff00) | \
((type << 8) & 0x00ff0000) | \
((type <<24) & 0xff000000) )
#define Flip_int64(type) (((type >>56) & 0x00000000000000ff) | \
((type >>40) & 0x000000000000ff00) | \
((type >>24) & 0x0000000000ff0000) | \
((type >>8) & 0x00000000ff000000) | \
((type <<8) & 0x000000ff00000000) | \
((type <<24) & 0x0000ff0000000000) | \
((type <<40) & 0x00ff000000000000) | \
((type <<56) & 0xff00000000000000))
int ComputeMapCost(idxtype nvtxs, idxtype nparts, idxtype *fepart, idxtype *cpart);
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, j, istep, options[10], nn, ne, fstep, lstep, nparts, nboxes, u[3], dim, nchanges, ncomm;
char filename[256];
idxtype *mien, *mrng, *part, *oldpart, *sflag, *bestdims, *fepart;
double *mxyz, *bxyz;
idxtype *xadj, *adjncy, *cntptr, *cntind;
idxtype numflag = 0, wgtflag = 0, edgecut, etype=2;
void *cinfo;
FILE *fpin;
long long int *ltmp;
if (argc != 6) {
mfprintf(stderr, "Usage: %s <nn> <ne> <fstep> <lstep> <nparts>\n", argv[0]);
exit(0);
}
nn = atoi(argv[1]);
ne = atoi(argv[2]);
fstep = atoi(argv[3]);
lstep = atoi(argv[4]);
nparts = atoi(argv[5]);
mprintf("Reading %s, nn: %D, ne: %D, fstep: %D, lstep: %D, nparts: %D\n", filename, nn, ne, fstep, lstep, nparts);
mien = idxmalloc(4*ne, "main: mien");
mxyz = gk_dmalloc(3*nn, "main: mxyz");
mrng = idxmalloc(4*ne, "main: mrng");
bxyz = gk_dmalloc(6*ne*4, "main: bxyz");
fepart = idxmalloc(nn, "main: fepart");
part = idxmalloc(nn, "main: part");
oldpart = idxmalloc(nn, "main: oldpart");
sflag = idxmalloc(nn, "main: sflag");
bestdims = idxsmalloc(2*nparts, -1, "main: bestdims");
xadj = idxmalloc(nn+1, "main: xadj");
adjncy = idxmalloc(50*nn, "main: adjncy");
/*========================================================================
* Read the initial mesh and setup the graph and contact information
*========================================================================*/
msprintf(filename, "mien.%04D", fstep);
fpin = GKfopen(filename, "rb", "main: mien");
fread(mien, sizeof(int), 4*ne, fpin);
for (i=0; i<4*ne; i++)
mien[i] = Flip_int32(mien[i]);
GKfclose(fpin);
msprintf(filename, "mxyz.%04D", fstep);
fpin = GKfopen(filename, "rb", "main: mxyz");
fread(mxyz, sizeof(double), 3*nn, fpin);
for (i=0; i<3*nn; i++) {
ltmp = (long long int *)(mxyz+i);
*ltmp = Flip_int64(*ltmp);
}
GKfclose(fpin);
mprintf("%e %e %e\n", mxyz[3*0+0], mxyz[3*0+1], mxyz[3*0+2]);
msprintf(filename, "mrng.%04D", fstep);
fpin = GKfopen(filename, "rb", "main: mrng");
fread(mrng, sizeof(int), 4*ne, fpin);
for (i=0; i<4*ne; i++)
mrng[i] = Flip_int32(mrng[i]);
GKfclose(fpin);
/*========================================================================
* Determine which nodes are in the surface
*========================================================================*/
iset(nn, 0, sflag);
for (i=0; i<ne; i++) {
if (mrng[4*i+0] > 0) { /* 1, 2, 3 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
}
if (mrng[4*i+1] > 0) { /* 1, 2, 4 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
if (mrng[4*i+2] > 0) { /* 2, 3, 4 */
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
if (mrng[4*i+3] > 0) { /* 1, 3, 4 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
}
mprintf("Contact Nodes: %D of %D\n", isum(nn, sflag), nn);
/*========================================================================
* Compute the FE partition
*========================================================================*/
numflag = mien[idxargmin(4*ne, mien)];
METIS_MeshToNodal(&ne, &nn, mien, &etype, &numflag, xadj, adjncy);
options[0] = 0;
METIS_PartGraphVKway(&nn, xadj, adjncy, NULL, NULL, &wgtflag, &numflag, &nparts,
options, &edgecut, fepart);
mprintf("K-way partitioning Volume: %D\n", edgecut);
/*========================================================================
* Get into the loop in which you go over the different configurations
*========================================================================*/
for (istep=fstep; istep<=lstep; istep++) {
msprintf(filename, "mxyz.%04D", istep);
mprintf("Reading %s...............................................................\n", filename);
fpin = GKfopen(filename, "rb", "main: mxyz");
fread(mxyz, sizeof(double), 3*nn, fpin);
for (i=0; i<3*nn; i++) {
ltmp = (long long int *)(mxyz+i);
*ltmp = Flip_int64(*ltmp);
}
GKfclose(fpin);
msprintf(filename, "mrng.%04D", istep);
fpin = GKfopen(filename, "rb", "main: mrng");
fread(mrng, sizeof(int), 4*ne, fpin);
for (i=0; i<4*ne; i++)
mrng[i] = Flip_int32(mrng[i]);
GKfclose(fpin);
/* Determine which nodes are in the surface */
iset(nn, 0, sflag);
for (i=0; i<ne; i++) {
if (mrng[4*i+0] > 0) { /* 1, 2, 3 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
}
if (mrng[4*i+1] > 0) { /* 1, 2, 4 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
if (mrng[4*i+2] > 0) { /* 2, 3, 4 */
sflag[mien[4*i+1]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
if (mrng[4*i+3] > 0) { /* 1, 3, 4 */
sflag[mien[4*i+0]-1] = 1;
sflag[mien[4*i+2]-1] = 1;
sflag[mien[4*i+3]-1] = 1;
}
}
mprintf("Contact Nodes: %D of %D\n", isum(nn, sflag), nn);
/* Determine the bounding boxes of the surface elements */
for (nboxes=0, i=0; i<ne; i++) {
if (mrng[4*i+0] > 0) { /* 1, 2, 3 */
u[0] = mien[4*i+0]-1;
u[1] = mien[4*i+1]-1;
u[2] = mien[4*i+2]-1;
bxyz[6*nboxes+0] = bxyz[6*nboxes+3] = mxyz[3*u[0]+0];
bxyz[6*nboxes+1] = bxyz[6*nboxes+4] = mxyz[3*u[0]+1];
bxyz[6*nboxes+2] = bxyz[6*nboxes+5] = mxyz[3*u[0]+2];
for (j=1; j<3; j++) {
for (dim=0; dim<3; dim++) {
bxyz[6*nboxes+dim] = (bxyz[6*nboxes+dim] > mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+dim]);
bxyz[6*nboxes+3+dim] = (bxyz[6*nboxes+3+dim] < mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+3+dim]);
}
}
nboxes++;
}
if (mrng[4*i+1] > 0) { /* 1, 2, 4 */
u[0] = mien[4*i+0]-1;
u[1] = mien[4*i+1]-1;
u[2] = mien[4*i+3]-1;
bxyz[6*nboxes+0] = bxyz[6*nboxes+3] = mxyz[3*u[0]+0];
bxyz[6*nboxes+1] = bxyz[6*nboxes+4] = mxyz[3*u[0]+1];
bxyz[6*nboxes+2] = bxyz[6*nboxes+5] = mxyz[3*u[0]+2];
for (j=1; j<3; j++) {
for (dim=0; dim<3; dim++) {
bxyz[6*nboxes+dim] = (bxyz[6*nboxes+dim] > mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+dim]);
bxyz[6*nboxes+3+dim] = (bxyz[6*nboxes+3+dim] < mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+3+dim]);
}
}
nboxes++;
}
if (mrng[4*i+2] > 0) { /* 2, 3, 4 */
u[0] = mien[4*i+1]-1;
u[1] = mien[4*i+2]-1;
u[2] = mien[4*i+3]-1;
bxyz[6*nboxes+0] = bxyz[6*nboxes+3] = mxyz[3*u[0]+0];
bxyz[6*nboxes+1] = bxyz[6*nboxes+4] = mxyz[3*u[0]+1];
bxyz[6*nboxes+2] = bxyz[6*nboxes+5] = mxyz[3*u[0]+2];
for (j=1; j<3; j++) {
for (dim=0; dim<3; dim++) {
bxyz[6*nboxes+dim] = (bxyz[6*nboxes+dim] > mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+dim]);
bxyz[6*nboxes+3+dim] = (bxyz[6*nboxes+3+dim] < mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+3+dim]);
}
}
nboxes++;
}
if (mrng[4*i+3] > 0) { /* 1, 3, 4 */
u[0] = mien[4*i+0]-1;
u[1] = mien[4*i+2]-1;
u[2] = mien[4*i+3]-1;
bxyz[6*nboxes+0] = bxyz[6*nboxes+3] = mxyz[3*u[0]+0];
bxyz[6*nboxes+1] = bxyz[6*nboxes+4] = mxyz[3*u[0]+1];
bxyz[6*nboxes+2] = bxyz[6*nboxes+5] = mxyz[3*u[0]+2];
for (j=1; j<3; j++) {
for (dim=0; dim<3; dim++) {
bxyz[6*nboxes+dim] = (bxyz[6*nboxes+dim] > mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+dim]);
bxyz[6*nboxes+3+dim] = (bxyz[6*nboxes+3+dim] < mxyz[3*u[j]+dim] ? mxyz[3*u[j]+dim] : bxyz[6*nboxes+3+dim]);
}
}
nboxes++;
}
}
cinfo = METIS_PartSurfForContactRCB(&nn, mxyz, sflag, &nparts, part, bestdims);
METIS_FindContacts(cinfo, &nboxes, bxyz, &nparts, &cntptr, &cntind);
METIS_FreeContactInfo(cinfo);
nchanges = 0;
if (istep > fstep) {
for (i=0; i<nn; i++)
nchanges += (part[i] != oldpart[i] ? 1 : 0);
}
idxcopy(nn, part, oldpart);
ncomm = ComputeMapCost(nn, nparts, fepart, part);
mprintf("Contacting Elements: %D Indices: %D Nchanges: %D MapCost: %D\n", nboxes, cntptr[nboxes]-nboxes, nchanges, ncomm);
gk_free((void **)&cntptr, &cntind, LTERM);
}
}
/***********************************************************************************
* This function determines the cost of moving data between the two meshes assuming
* that a good matching between the two partitions was done!
************************************************************************************/
int ComputeMapCost(idxtype nvtxs, idxtype nparts, idxtype *fepart, idxtype *cpart)
{
idxtype i, j, k, n, ncomm;
KeyValueType cand[nparts*nparts];
idxtype fmatched[nparts], cmatched[nparts];
/* Compute the overlap */
for (i=0; i<nparts; i++) {
for (j=0; j<nparts; j++) {
cand[i*nparts+j].key = 0;
cand[i*nparts+j].val = i*nparts+j;
}
}
for (k=0, i=0; i<nvtxs; i++) {
if (cpart[i] >= 0) {
cand[(fepart[i]-1)*nparts+(cpart[i]-1)].key++;
k++;
}
}
mprintf("Contact points: %D\n", k);
ikeysort(nparts*nparts, cand);
iset(nparts, -1, fmatched);
iset(nparts, -1, cmatched);
for (ncomm=0, k=nparts*nparts-1; k>=0; k--) {
i = cand[k].val/nparts;
j = cand[k].val%nparts;
if (fmatched[i] == -1 && cmatched[j] == -1) {
fmatched[i] = j;
cmatched[j] = i;
}
else
ncomm += cand[k].key;
}
mprintf("Ncomm: %D\n", ncomm);
return ncomm;
}
<|start_filename|>RecastDemo/Contrib/metis/source/programs/io.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* io.c
*
* This file contains routines related to I/O
*
* Started 8/28/94
* George
*
* $Id: io.c,v 1.7 2003/04/13 04:45:14 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* This function reads a graph file in the METIS format
**************************************************************************/
void ReadGraph(GraphType *graph, char *filename, idxtype *wgtflag)
{
idxtype i, j, k, l, fmt, readew, readvw, ncon, edge, ewgt;
idxtype *xadj, *adjncy, *vwgt, *adjwgt;
char *line, *oldstr, *newstr;
FILE *fpin;
InitGraph(graph);
line = gk_cmalloc(MAXLINE+1, "ReadGraph: line");
fpin = gk_fopen(filename, "r", __func__);
do {
fgets(line, MAXLINE, fpin);
} while (line[0] == '%' && !feof(fpin));
if (feof(fpin)) {
graph->nvtxs = 0;
gk_free((void **)&line, LTERM);
return;
}
fmt = ncon = 0;
msscanf(line, "%D %D %D %D", &(graph->nvtxs), &(graph->nedges), &fmt, &ncon);
readew = (fmt%10 > 0);
readvw = ((fmt/10)%10 > 0);
if (fmt >= 100) {
mprintf("Cannot read this type of file format!");
exit(0);
}
*wgtflag = 0;
if (readew)
*wgtflag += 1;
if (readvw)
*wgtflag += 2;
if (ncon > 0 && !readvw) {
mprintf("------------------------------------------------------------------------------\n");
mprintf("*** I detected an error in your input file ***\n\n");
mprintf("You specified ncon=%D, but the fmt parameter does not specify vertex weights\n", ncon);
mprintf("Make sure that the fmt parameter is set to either 10 or 11.\n");
mprintf("------------------------------------------------------------------------------\n");
exit(0);
}
graph->nedges *=2;
ncon = graph->ncon = (ncon == 0 ? 1 : ncon);
/*mprintf("%D %D %D %D %D [%D %D]\n", fmt, fmt%10, (fmt/10)%10, ncon, graph->ncon, readew, readvw);*/
if (graph->nvtxs > MAXIDX)
errexit("\nThe matrix is too big: %d [%d %d]\n", graph->nvtxs, MAXIDX, sizeof(idxtype));
xadj = graph->xadj = idxsmalloc(graph->nvtxs+1, 0, "ReadGraph: xadj");
adjncy = graph->adjncy = idxmalloc(graph->nedges, "ReadGraph: adjncy");
vwgt = graph->vwgt = (readvw ? idxmalloc(ncon*graph->nvtxs, "ReadGraph: vwgt") : NULL);
adjwgt = graph->adjwgt = (readew ? idxmalloc(graph->nedges, "ReadGraph: adjwgt") : NULL);
/* Start reading the graph file */
for (xadj[0]=0, k=0, i=0; i<graph->nvtxs; i++) {
do {
fgets(line, MAXLINE, fpin);
} while (line[0] == '%' && !feof(fpin));
oldstr = line;
newstr = NULL;
if (strlen(line) == MAXLINE)
errexit("\nBuffer for fgets not big enough!\n");
if (readvw) {
for (l=0; l<ncon; l++) {
vwgt[i*ncon+l] = strtoidx(oldstr, &newstr, 10);
oldstr = newstr;
}
}
for (;;) {
edge = strtoidx(oldstr, &newstr, 10) -1;
oldstr = newstr;
if (readew) {
ewgt = strtoidx(oldstr, &newstr, 10);
oldstr = newstr;
}
if (edge < 0)
break;
adjncy[k] = edge;
if (readew)
adjwgt[k] = ewgt;
k++;
}
xadj[i+1] = k;
}
gk_fclose(fpin);
if (k != graph->nedges) {
mprintf("------------------------------------------------------------------------------\n");
mprintf("*** I detected an error in your input file ***\n\n");
mprintf("In the first line of the file, you specified that the graph contained\n%D edges. However, I only found %D edges in the file.\n", graph->nedges/2, k/2);
if (2*k == graph->nedges) {
mprintf("\n *> I detected that you specified twice the number of edges that you have in\n");
mprintf(" the file. Remember that the number of edges specified in the first line\n");
mprintf(" counts each edge between vertices v and u only once.\n\n");
}
mprintf("Please specify the correct number of edges in the first line of the file.\n");
mprintf("------------------------------------------------------------------------------\n");
exit(0);
}
gk_free((void **)&line, LTERM);
}
/*************************************************************************
* This function reads the coordinates of the graph's vertices
**************************************************************************/
void ReadCoordinates(GraphType *graph, char *filename)
{
idxtype i, j, k, l, nvtxs, fmt, readew, readvw, ncon, edge, ewgt;
FILE *fpin;
char *line;
fpin = gk_fopen(filename, "r", __func__);
nvtxs = graph->nvtxs;
graph->coords = gk_dsmalloc(3*nvtxs, 0.0, "ReadCoordinates: coords");
line = gk_cmalloc(MAXLINE+1, "ReadCoordinates: line");
for (i=0; i<nvtxs; i++) {
fgets(line, MAXLINE, fpin);
msscanf(line, "%lf %lf %lf", graph->coords+3*i+0, graph->coords+3*i+1, graph->coords+3*i+2);
}
gk_fclose(fpin);
gk_free((void **)&line, LTERM);
}
/*************************************************************************
* This function writes out the partition vector
**************************************************************************/
void WritePartition(char *fname, idxtype *part, idxtype n, idxtype nparts)
{
FILE *fpout;
idxtype i;
char filename[256];
msprintf(filename,"%s.part.%D",fname, nparts);
fpout = gk_fopen(filename, "w", __func__);
for (i=0; i<n; i++)
fprintf(fpout,"%" PRIIDX "\n", part[i]);
gk_fclose(fpout);
}
/*************************************************************************
* This function writes out the partition vectors for a mesh
**************************************************************************/
void WriteMeshPartition(char *fname, idxtype nparts, idxtype ne, idxtype *epart,
idxtype nn, idxtype *npart)
{
FILE *fpout;
idxtype i;
char filename[256];
msprintf(filename,"%s.epart.%D",fname, nparts);
fpout = gk_fopen(filename, "w", __func__);
for (i=0; i<ne; i++)
fprintf(fpout,"%" PRIIDX "\n", epart[i]);
gk_fclose(fpout);
msprintf(filename,"%s.npart.%D",fname, nparts);
fpout = gk_fopen(filename, "w", __func__);
for (i=0; i<nn; i++)
fprintf(fpout, "%" PRIIDX "\n", npart[i]);
gk_fclose(fpout);
}
/*************************************************************************
* This function writes out the permutation vector
**************************************************************************/
void WritePermutation(char *fname, idxtype *iperm, idxtype n)
{
FILE *fpout;
idxtype i;
char filename[256];
msprintf(filename,"%s.iperm",fname);
fpout = gk_fopen(filename, "w", __func__);
for (i=0; i<n; i++)
fprintf(fpout, "%" PRIIDX "\n", iperm[i]);
gk_fclose(fpout);
}
/*************************************************************************
* This function checks if a graph is valid
**************************************************************************/
int CheckGraph(GraphType *graph)
{
idxtype i, j, k, l, nvtxs, err=0;
idxtype *xadj, *adjncy, *adjwgt;
nvtxs = graph->nvtxs;
xadj = graph->xadj;
adjncy = graph->adjncy;
adjwgt = graph->adjwgt;
for (i=0; i<nvtxs; i++) {
for (j=xadj[i]; j<xadj[i+1]; j++) {
k = adjncy[j];
if (i == k) {
mprintf("Vertex %D contains a self-loop (i.e., diagonal entry in the matrix)!\n", i);
err++;
}
else {
for (l=xadj[k]; l<xadj[k+1]; l++) {
if (adjncy[l] == i) {
if (adjwgt != NULL && adjwgt[l] != adjwgt[j]) {
mprintf("Edges (%D %D) and (%D %D) do not have the same weight! %D %D\n", i,k,k,i, adjwgt[l], adjwgt[adjncy[j]]);
err++;
}
break;
}
}
if (l == xadj[k+1]) {
mprintf("Missing edge: (%D %D)!\n", k, i);
err++;
}
}
}
}
if (err > 0)
mprintf("A total of %D errors exist in the input file. Correct them, and run again!\n", err);
return (err == 0 ? 1 : 0);
}
/****************************************************************************
* This function detects the input mesh type
***************************************************************************/
int MeshType(char *filename)
{
int i, j, k, l, len, cnt=0;
FILE *fpin;
char temp[40], inpt[80];
int firstline[3];
fpin = gk_fopen(filename, "r", __func__);
mfscanf(fpin,"%[^\n]s", inpt);
gk_fclose(fpin);
len = strlen(inpt);
i=0;k=0;
while (inpt[i]==' ') i++;
while (i<=len) {
if (inpt[i]==' ' || i==len) {
l=0;
for (j=k; j<i;j++ )
temp[l++]=inpt[j];
temp[l]='\0';
firstline[cnt++] = atoi(temp);
while (inpt[i]==' ') i++;
k=i;
if (i==len) break;
}
else
i++;
}
if (cnt==1)
return 0; /*Mixed elements without weights*/
else if (cnt==2 && firstline[1]>0)
return 1; /*Fixed elements without weights*/
else if (cnt==2 && firstline[1]==-1)
return 2; /*Mixed elements with weights*/
else if (cnt==3 && firstline[2]==-1)
return 3; /*Fixed elements with weights*/
return -1; /*unrecognized header; previously the function fell off the end without a value*/
}
/*************************************************************************
* This function reads the element node array of a mesh
**************************************************************************/
idxtype *ReadMesh(char *filename, idxtype *ne, idxtype *nn, idxtype *etype)
{
idxtype i, j, k, esize;
idxtype *elmnts;
FILE *fpin;
fpin = gk_fopen(filename, "r", __func__);
mfscanf(fpin, "%D %D", ne, etype);
switch (*etype) {
case 1:
esize = 3;
break;
case 2:
esize = 4;
break;
case 3:
esize = 8;
break;
case 4:
esize = 4;
break;
case 5:
esize = 2;
break;
default:
errexit("Unknown mesh-element type: %d\n", *etype);
}
elmnts = idxmalloc(esize*(*ne), "ReadMesh: elmnts");
for (j=esize*(*ne), i=0; i<j; i++) {
mfscanf(fpin, "%D", elmnts+i);
elmnts[i]--;
}
gk_fclose(fpin);
*nn = elmnts[idxargmax(j, elmnts)]+1;
return elmnts;
}
/*************************************************************************
* This function reads the element node array of a mesh with weights
**************************************************************************/
idxtype *ReadMeshWgt(char *filename, idxtype *ne, idxtype *nn, idxtype *etype,
idxtype *vwgt)
{
idxtype i, j, k, esize;
idxtype *elmnts;
FILE *fpin;
fpin = gk_fopen(filename, "r", __func__);
mfscanf(fpin, "%D %D", ne, etype);
mfscanf(fpin, "%D", &i);
switch (*etype) {
case 1:
esize = 3;
break;
case 2:
esize = 4;
break;
case 3:
esize = 8;
break;
case 4:
esize = 4;
break;
case 5:
esize = 2;
break;
default:
errexit("Unknown mesh-element type: %d\n", *etype);
}
elmnts = idxmalloc(esize*(*ne), "ReadMeshWgt: elmnts");
for (j=0, i=0; i<*ne; i++) {
mfscanf(fpin, "%D", vwgt+i);
for (k=0; k<esize; k++) {
mfscanf(fpin, "%D", elmnts+j);
elmnts[j++]--;
}
}
gk_fclose(fpin);
*nn = elmnts[idxargmax(j, elmnts)]+1;
return elmnts;
}
/*************************************************************************
* This function reads the weight of each element
**************************************************************************/
idxtype *ReadWgt(char *filename, idxtype *ne, idxtype *nn, idxtype *etype)
{
idxtype i, j, k, l, esize;
idxtype *vwgt;
FILE *fpin;
fpin = gk_fopen(filename, "r", __func__);
mfscanf(fpin, "%D %D", ne, etype);
mfscanf(fpin, "%D", &i);
switch (*etype) {
case 1:
esize = 3;
break;
case 2:
esize = 4;
break;
case 3:
esize = 8;
break;
case 4:
esize = 4;
break;
case 5:
esize = 2;
break;
default:
errexit("Unknown mesh-element type: %d\n", *etype);
}
vwgt = idxmalloc(*ne, "ReadWgt: vwgt");
for (j=0, i=0; i<*ne; i++) {
mfscanf(fpin, "%D", vwgt+i);
for (k=0; k<esize; k++) {
mfscanf(fpin, "%D", &l);
j++;
}
}
gk_fclose(fpin);
return vwgt;
}
/*************************************************************************
* This function reads the number of elements of a mixed mesh
**************************************************************************/
idxtype MixedElements(char *filename)
{
idxtype ne;
FILE *fpin;
fpin = gk_fopen(filename, "r", __func__);
mfscanf(fpin, "%D", &ne);
gk_fclose(fpin);
return ne;
}
/*************************************************************************
* This function reads the element node array of a mixed mesh
**************************************************************************/
idxtype *ReadMixedMesh(char *filename, idxtype *ne, idxtype *nn, idxtype *etype)
{
idxtype i, j, k, esize;
idxtype *elmnts;
FILE *fpin;
idxtype sizes[]={-1,3,4,8,4,2};
fpin = gk_fopen(filename, "r", __func__);
mfscanf(fpin, "%D", ne);
elmnts = idxmalloc(8*(*ne), "ReadMixedMesh: elmnts");
for (j=0, i=0; i<*ne; i++) {
mfscanf(fpin, "%D", etype+i);
for (k=0;k<sizes[etype[i]];k++) {
mfscanf(fpin, "%D", elmnts+j);
elmnts[j++]--;
}
}
gk_fclose(fpin);
*nn = elmnts[idxargmax(j, elmnts)]+1;
return elmnts;
}
/*************************************************************************
* This function reads the element node array of a mixed mesh with weights
**************************************************************************/
idxtype *ReadMixedMeshWgt(char *filename, idxtype *ne, idxtype *nn,
idxtype *etype, idxtype *vwgt)
{
idxtype i, j, k, esize;
idxtype *elmnts;
FILE *fpin;
idxtype sizes[]={-1,3,4,8,4,2};
fpin = gk_fopen(filename, "r", __func__);
mfscanf(fpin, "%D", ne);
mfscanf(fpin, "%D", nn);
elmnts = idxmalloc(8*(*ne), "ReadMixedMeshWgt: elmnts");
for (j=0, i=0; i<*ne; i++) {
mfscanf(fpin, "%D",etype+i);
mfscanf(fpin, "%D",vwgt+i);
for (k=0;k<sizes[etype[i]];k++) {
mfscanf(fpin, "%D", elmnts+j);
elmnts[j++]--;
}
}
gk_fclose(fpin);
*nn = elmnts[idxargmax(j, elmnts)]+1;
return elmnts;
}
/************************************************************************
* This function reads the magic-number table used for mixed meshes
**************************************************************************/
idxtype *ReadMgcnums(char *filename)
{
idxtype i;
idxtype *mgc;
FILE *fpin;
fpin = gk_fopen(filename, "r", __func__);
mgc = idxmalloc(36, "Readmgcnums: mgcnums");
for (i=0; i<36; i++) {
if (i<6 || i%6==0)
mgc[i]=-1;
else
mfscanf(fpin, "%D", mgc+i);
}
gk_fclose(fpin);
return mgc;
}
/*************************************************************************
* This function writes a graph into a file
**************************************************************************/
void WriteGraph(char *filename, idxtype nvtxs, idxtype *xadj, idxtype *adjncy)
{
idxtype i, j;
FILE *fpout;
fpout = gk_fopen(filename, "w", __func__);
mfprintf(fpout, "%D %D", nvtxs, xadj[nvtxs]/2);
for (i=0; i<nvtxs; i++) {
mfprintf(fpout, "\n");
for (j=xadj[i]; j<xadj[i+1]; j++)
fprintf(fpout, " %" PRIIDX, adjncy[j]+1);
}
gk_fclose(fpout);
}
/*************************************************************************
* This function writes a weighted graph into a file
**************************************************************************/
void WriteWgtGraph(char *filename, idxtype nvtxs, idxtype *xadj, idxtype *adjncy, idxtype *vwgt)
{
idxtype i, j;
FILE *fpout;
fpout = gk_fopen(filename, "w", __func__);
mfprintf(fpout, "%D %D", nvtxs, xadj[nvtxs]/2);
mfprintf(fpout, " %D", 10);
for (i=0; i<nvtxs; i++) {
fprintf(fpout, "\n");
fprintf(fpout, "%" PRIIDX, vwgt[i]);
for (j=xadj[i]; j<xadj[i+1]; j++)
fprintf(fpout, " %" PRIIDX, adjncy[j]+1);
}
gk_fclose(fpout);
}
/*************************************************************************
* This function writes a multi-constraint (moc) graph into a file
**************************************************************************/
void WriteMocGraph(GraphType *graph)
{
idxtype i, j, nvtxs, ncon;
idxtype *xadj, *adjncy;
float *nvwgt;
char filename[256];
FILE *fpout;
nvtxs = graph->nvtxs;
ncon = graph->ncon;
xadj = graph->xadj;
adjncy = graph->adjncy;
nvwgt = graph->nvwgt;
msprintf(filename, "moc.graph.%D.%D", nvtxs, ncon);
fpout = gk_fopen(filename, "w", __func__);
mfprintf(fpout, "%D %D 10 1 %D", nvtxs, xadj[nvtxs]/2, ncon);
for (i=0; i<nvtxs; i++) {
mfprintf(fpout, "\n");
for (j=0; j<ncon; j++)
fprintf(fpout, "%" PRIIDX " ", (int)((float)10e6*nvwgt[i*ncon+j]));
for (j=xadj[i]; j<xadj[i+1]; j++)
fprintf(fpout, " %" PRIIDX, adjncy[j]+1);
}
gk_fclose(fpout);
}
<|start_filename|>RecastDemo/Contrib/metis/source/libmetis/metislib.h<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* metis.h
*
* This file includes all necessary header files
*
* Started 8/27/94
* George
*
* $Id: metislib.h,v 1.1 2002/08/10 06:29:31 karypis Exp $
*/
#include <GKlib.h>
#include <stddef.h>
#include <stdlib.h>
#include <stdarg.h>
#include <stdio.h>
#include <ctype.h>
#include <math.h>
#include <time.h>
#include <string.h>
#include <limits.h>
#include <signal.h>
#include <setjmp.h>
#include <assert.h>
#if defined(ENABLE_OPENMP)
#include <omp.h>
#endif
#include <metis.h>
#include <defs.h>
#include <struct.h>
#include <macros.h>
#include <rename.h>
#include <proto.h>
#if defined(COMPILER_MSC)
#define rint(x) ((idxtype)((x)+0.5)) /* MSC does not have rint() function */
#endif
#if defined(COMPILER_GCC)
extern char* strdup (const char *);
#endif
<|start_filename|>RecastDemo/Contrib/metis/source/programs/mesh2nodal.c<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* mesh2nodal.c
*
* This file reads in the element node connectivity array of a mesh and writes
* out its nodal graph in a format suitable for METIS.
*
* Started 9/29/97
* George
*
* $Id: mesh2nodal.c,v 1.2 2002/08/10 06:02:53 karypis Exp $
*
*/
#include <metisbin.h>
/*************************************************************************
* Let the game begin
**************************************************************************/
int main(int argc, char *argv[])
{
idxtype i, j, ne, nn, etype, mtype, numflag=0;
idxtype *elmnts, *xadj, *adjncy, *metype, *weights;
double IOTmr, DUALTmr;
char fileout[256], etypestr[5][5] = {"TRI", "TET", "HEX", "QUAD", "LINE"};
if (argc != 2) {
mprintf("Usage: %s <meshfile>\n",argv[0]);
exit(0);
}
mtype=MeshType(argv[1]);
ne=MixedElements(argv[1]);
metype = idxmalloc(ne, "main: metype");
weights = idxmalloc(ne, "main: weights");
if (mtype==1 || mtype==3){
gk_clearcputimer(IOTmr);
gk_clearcputimer(DUALTmr);
gk_startcputimer(IOTmr);
if (mtype==1)
elmnts = ReadMesh(argv[1], &ne, &nn, &etype);
else
elmnts = ReadMeshWgt(argv[1], &ne, &nn, &etype, weights);
gk_stopcputimer(IOTmr);
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Mesh Information ----------------------------------------------------\n");
mprintf(" Name: %s, #Elements: %D, #Nodes: %D, Etype: %s\n\n", argv[1], ne, nn, etypestr[etype-1]);
mprintf("Forming Nodal Graph... ----------------------------------------------\n");
xadj = idxmalloc(nn+1, "main: xadj");
adjncy = idxmalloc(20*nn, "main: adjncy");
gk_startcputimer(DUALTmr);
METIS_MeshToNodal(&ne, &nn, elmnts, &etype, &numflag, xadj, adjncy);
gk_stopcputimer(DUALTmr);
mprintf(" Nodal Information: #Vertices: %D, #Edges: %D\n", nn, xadj[nn]/2);
msprintf(fileout, "%s.ngraph", argv[1]);
gk_startcputimer(IOTmr);
WriteGraph(fileout, nn, xadj, adjncy);
gk_stopcputimer(IOTmr);
mprintf("\nTiming Information --------------------------------------------------\n");
mprintf(" I/O: \t\t %7.3f\n", gk_getcputimer(IOTmr));
mprintf(" Nodal Creation:\t\t %7.3f\n", gk_getcputimer(DUALTmr));
mprintf("**********************************************************************\n");
}
else{
gk_clearcputimer(IOTmr);
gk_clearcputimer(DUALTmr);
gk_startcputimer(IOTmr);
if(mtype==0)
elmnts = ReadMixedMesh(argv[1], &ne, &nn, metype);
else
elmnts = ReadMixedMeshWgt(argv[1], &ne, &nn, metype, weights);
gk_stopcputimer(IOTmr);
mprintf("**********************************************************************\n");
mprintf("%s", METISTITLE);
mprintf("Mesh Information ----------------------------------------------------\n");
mprintf(" Name: %s, #Elements: %D, #Nodes: %D, Etype: %s\n\n", argv[1], ne, nn, "Mixed");
mprintf("Forming Nodal Graph... ----------------------------------------------\n");
xadj = idxmalloc(nn+1, "main: xadj");
adjncy = idxmalloc(20*nn, "main: adjncy");
gk_startcputimer(DUALTmr);
METIS_MixedMeshToNodal(&ne, &nn, elmnts, metype, &numflag, xadj, adjncy);
gk_stopcputimer(DUALTmr);
mprintf(" Nodal Information: #Vertices: %D, #Edges: %D\n", nn, xadj[nn]/2);
msprintf(fileout, "%s.ngraph", argv[1]);
gk_startcputimer(IOTmr);
WriteGraph(fileout, nn, xadj, adjncy);
gk_stopcputimer(IOTmr);
mprintf("\nTiming Information --------------------------------------------------\n");
mprintf(" I/O: \t\t %7.3f\n", gk_getcputimer(IOTmr));
mprintf(" Nodal Creation:\t\t %7.3f\n", gk_getcputimer(DUALTmr));
mprintf("**********************************************************************\n");
}
gk_free((void **)&elmnts, &xadj, &adjncy, &metype, &weights, LTERM);
}
<|start_filename|>RecastDemo/Contrib/metis/source/test/proto.h<|end_filename|>
/*
* Copyright 1997, Regents of the University of Minnesota
*
* proto.h
*
* This file contains function prototypes
*
* Started 10/19/95
* George
*
* $Id: proto.h 1428 2007-04-06 23:37:27Z karypis $
*
*/
#ifndef _TEST_PROTO_H_
#define _TEST_PROTO_H_
void ReadGraph(GraphType *, char *, idxtype *);
void Test_PartGraph(idxtype, idxtype *, idxtype *);
int VerifyPart(idxtype, idxtype *, idxtype *, idxtype *, idxtype *, idxtype, idxtype, idxtype *);
int VerifyWPart(idxtype, idxtype *, idxtype *, idxtype *, idxtype *, idxtype, float *, idxtype, idxtype *);
void Test_PartGraphV(idxtype, idxtype *, idxtype *);
int VerifyPartV(idxtype, idxtype *, idxtype *, idxtype *, idxtype *, idxtype, idxtype, idxtype *);
int VerifyWPartV(idxtype, idxtype *, idxtype *, idxtype *, idxtype *, idxtype, float *, idxtype, idxtype *);
void Test_PartGraphmC(idxtype, idxtype *, idxtype *);
int VerifyPartmC(idxtype, idxtype, idxtype *, idxtype *, idxtype *, idxtype *, idxtype, float *, idxtype, idxtype *);
void Test_ND(idxtype, idxtype *, idxtype *);
int VerifyND(idxtype, idxtype *, idxtype *);
#endif
<|start_filename|>RecastDemo/Contrib/metis/source/programs/defs.h<|end_filename|>
/*
* defs.h
*
* This file contains various constant definitions
*
* Started 8/9/02
* George
*
*/
#define CMD_MTYPE 1
#define CMD_ITYPE 2
#define CMD_RTYPE 3
#define CMD_BALANCE 10
#define CMD_NITER 20
#define CMD_NTRIALS 21
#define CMD_TPWGTS 30
#define CMD_SEED 50
#define CMD_OUTPUT 100
#define CMD_NOOUTPUT 101
#define CMD_DBGLVL 1000
#define CMD_HELP 1001
/* The text labels for MTypes */
static char mtypenames[][10] = {"", "None", "MAXTF", "SQRT", "LOG", "IDF"};
/* The text labels for ITypes */
static char itypenames[][10] = {"", "None", "IDF"};
/* The text labels for RTypes */
static char rtypenames[][20] = {"", "I1", "I2", "E1", "G1", "G1'", "H1", "H2", "SLINK",
"SLINK_W", "CLINK", "CLINK_W", "UPGMA", "UPGMA_W",
"UPGMA_W2", "Cut", "RCut", "NCut", "MMCut"};
| educharlie/HPA-NavMesh |
<|start_filename|>sandbox/test_consructors.jl<|end_filename|>
using Oceananigans
using Oceanostics
grid = RegularRectilinearGrid(size=(4,4,4), extent=(1,1,1))
model = IncompressibleModel(grid=grid)
u, v, w = model.velocities
ke = Oceanostics.KineticEnergy(model, u, v, w)
SPx = Oceanostics.XShearProduction(model, u, v, w, 0, 0, 0)
SPy = Oceanostics.YShearProduction(model, u, v, w, 0, 0, 0)
SPz = Oceanostics.ZShearProduction(model, u, v, w, 0, 0, 0)
#ε_iso = Oceanostics.IsotropicViscousDissipation(model, ν, u, v, w)
#ε_ani = Oceanostics.AnisotropicViscousDissipation(model, ν, ν, ν, u, v, w)
| glwagner/Oceanostics.jl |
<|start_filename|>app/assets/javascripts/flashes.js<|end_filename|>
$(function(){
var flashCallback;
flashCallback = function() {
return $(".flashes").fadeOut();
};
$(".flashes").bind('click', (function(_this) {
return function(event) {
return $(".flashes").fadeOut();
};
})(this));
setTimeout(flashCallback, 3000);
});
<|start_filename|>app/assets/javascripts/sticky.js<|end_filename|>
$(function(){
var $filterHeight = $( '.filter-area' ).outerHeight();
$( '.main-content' ).css({
'margin-top': $filterHeight
});
});
| usdigitalresponse/dolores-landingham-slack-bot |
<|start_filename|>jenkins/webhook-proxy/test/fixtures/build-rejected-openshift-response.json<|end_filename|>
{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"BuildConfig.build.openshift.io \"-\" is invalid: metadata.name: Invalid value: \"-\": a DNS-1123 subdomain must consist of lower case alphanumeric characters, '-' or '.', and must start and end with an alphanumeric character (e.g. 'example.com', regex used for validation is '[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*')","reason":"Invalid","details":{"name":"-","group":"build.openshift.io","kind":"BuildConfig","causes":[{"reason":"FieldValueInvalid","message":"Invalid value: \"-\": a DNS-1123 subdomain must consist of lower case alphanumeric characters, '-' or '.', and must start and end with an alphanumeric character (e.g. 'example.com', regex used for validation is '[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*')","field":"metadata.name"}]},"code":422}
<|start_filename|>jenkins/webhook-proxy/Makefile<|end_filename|>
test: test-unit
test-unit:
@(go test -v -cover)
fmt:
@(gofmt -w .)
lint:
golangci-lint run
build: build-linux build-darwin build-windows
build-linux:
GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build -o webhook-proxy_linux_amd64
build-darwin:
GOOS=darwin GOARCH=amd64 CGO_ENABLED=0 go build -o webhook-proxy_darwin_amd64
build-windows:
GOOS=windows GOARCH=amd64 CGO_ENABLED=0 go build -o webhook-proxy_windows_amd64.exe
<|start_filename|>infrastructure-setup/scripts/json/createUser.json<|end_filename|>
{
"name":"createUser",
"content":"import groovy.json.JsonSlurper;
def user = new JsonSlurper().parseText(args);
security.addUser(user.id, user.firstName, user.lastName, user.email, true, user.password, user.roleIds);",
"type":"groovy"
}
<|start_filename|>jenkins/master/configuration/init.groovy.d/sonarqube.groovy<|end_filename|>
import jenkins.model.*
import hudson.plugins.sonar.*
import hudson.plugins.sonar.model.*
import hudson.plugins.sonar.utils.SQServerVersions
def jen = Jenkins.getInstance()
def env = System.getenv()
// https://github.com/SonarSource/sonar-scanner-jenkins/blob/sonar-2.6/src/main/java/hudson/plugins/sonar/SonarGlobalConfiguration.java
def conf = jen.getDescriptor("hudson.plugins.sonar.SonarGlobalConfiguration")
// https://github.com/SonarSource/sonar-scanner-jenkins/blob/sonar-2.6/src/main/java/hudson/plugins/sonar/SonarInstallation.java
def inst = new SonarInstallation(
"SonarServerConfig",
env['SONAR_SERVER_URL'],
SQServerVersions.SQ_5_3_OR_HIGHER,
env['SONAR_SERVER_AUTH_TOKEN'],
"",
"",
"",
"",
"",
new TriggersConfig(),
"",
"",
""
)
conf.setInstallations(inst)
conf.save()
<|start_filename|>nexus/cleanup_nexus_old_candidates.groovy<|end_filename|>
import org.sonatype.nexus.repository.storage.StorageFacet;
import org.sonatype.nexus.repository.storage.Query;
def repositoryName = 'candidates';
// counter is zero based so a counter of 9 keeps 10 entries
def maxArtifactCount = 9;
// Get a repository
def repo = repository.repositoryManager.get(repositoryName);
// Get a database transaction
def tx = repo.facet(StorageFacet).txSupplier().get();
try {
// Begin the transaction
tx.begin();
def previousComponent = null;
def uniqueComponents = [];
tx.findComponents(Query.builder().suffix(' ORDER BY group, name').build(), [repo]).each{component ->
if (previousComponent == null || (!component.group().equals(previousComponent.group()) || !component.name().equals(previousComponent.name()))) {
uniqueComponents.add(component);
}
previousComponent = component;
}
uniqueComponents.each {uniqueComponent ->
def componentVersions = tx.findComponents(Query.builder().where('group = ').param(uniqueComponent.group()).and('name = ').param(uniqueComponent.name()).suffix(' ORDER BY last_updated DESC').build(), [repo]);
log.info(uniqueComponent.group() + ", " + uniqueComponent.name() + " size " + componentVersions.size());
if (componentVersions.size() > maxArtifactCount) {
componentVersions.eachWithIndex { component, index ->
if (index > maxArtifactCount) {
log.info("Deleting Component ${component.group()} ${component.name()} ${component.version()}")
tx.deleteComponent(component);
}
}
}
}
} finally {
// End the transaction
tx.commit();
}
<|start_filename|>infrastructure-setup/scripts/json/createRole.json<|end_filename|>
{
"name":"createRole",
"content":"import groovy.json.JsonSlurper;
def role = new JsonSlurper().parseText(args);
security.addRole(role.id, role.name, role.description, role.privilegeIds, role.roleIds);",
"type":"groovy"
}
| ungerts/ods-core |
<|start_filename|>src/lib/ReqParse.js<|end_filename|>
const request = require('request');
const cheerio = require('cheerio');
const URL = require('url');
const readline = require('readline')
const host = "https://www.xvideos.com";
/*
Keyword(homepage) -> Page(loop?) -> URI -> request(async)
-> body -> "video_list"(append?)
*/
function homepageUrl(page){
if(page===0) return host;
return host+URL.format({
path:'/new/'+page,
json:true
});
}
function keywordUrl(keyword,page){
return host+URL.format({
query:{
k:keyword,
p:page
},
json:true
});
}
function req(url){
return new Promise(function(resolve,reject){
request(
{
method:'GET',
header:{'Content-Type' : 'application/json; charset=UTF-8'},
uri:url
},(err,res,body)=>{
if(!err){
try{
resolve(body);
}catch(err){
reject(Error('no content'));
}
}else{
reject('No response');
}
})
})
}
const parseVideo = (body,page)=>{
let $ = cheerio.load(body);
let content = $('#content');
let avArray = [];
let video = content.find('.thumb-block')
if(video[0]===undefined) throw new Error('no content')
for(let i=0;i<video.length;i++){
let obj = {
page:page,
attr:{
index:i,
name:video.eq(i).children('div.thumb-under').find('a').text().substr(0,30),
link:video.eq(i).children('div.thumb-under').find('a').attr('href')
}
}
avArray.push(obj);
}
return avArray;
}
const parseTag = (body)=>{
let $ = cheerio.load(body);
let tagTable = $('.video-tags-list');
let tag = [];
let element = tagTable.find('li');
if(element[1]===undefined) throw new Error('no tag');
for(let i=0;i<element.length-1;i++){
let text = element.eq(i).children('a').text();
if(text.charAt(0)!='\n') tag.push(text);
}
tag = tag.slice(0,5);//keep only the first five tags
return tag;
}
exports.tagCrawler = async (path)=>{
let body = await req(host+path);
let taglist = parseTag(body);
return taglist
}
exports.homepageCrawler = async (page)=>{
let body = await req(homepageUrl(page));
let avlist = parseVideo(body,page);
return avlist;
}
exports.keywordCrawler = async (keyword,page)=>{
let body = await req(keywordUrl(keyword,page));
let avlist = parseVideo(body,page);
return avlist;
}
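/*
Minimal usage sketch for the exports above (hypothetical, not part of the
original sources): crawl the first homepage listing and print the titles.

  const crawler = require('./ReqParse.js');
  crawler.homepageCrawler(0)
    .then(list => list.forEach(v => console.log(v.attr.name)))
    .catch(err => console.error(err.message));
*/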
<|start_filename|>dist/lib/ReqParse.js<|end_filename|>
function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
const request = require('request');
const cheerio = require('cheerio');
const URL = require('url');
const readline = require('readline');
const host = "https://www.xvideos.com";
/*
Keyword(homepage) -> Page(loop?) -> URI -> request(async)
-> body -> "video_list"(append?)
*/
function homepageUrl(page) {
if (page === 0) return host;
return host + URL.format({
path: '/new/' + page,
json: true
});
}
function keywordUrl(keyword, page) {
return host + URL.format({
query: {
k: keyword,
p: page
},
json: true
});
}
function req(url) {
return new Promise(function (resolve, reject) {
request({
method: 'GET',
header: { 'Content-Type': 'application/json; charset=UTF-8' },
uri: url
}, (err, res, body) => {
if (!err) {
try {
resolve(body);
} catch (err) {
reject(Error('no content'));
}
} else {
reject('No response');
}
});
});
}
const parseVideo = (body, page) => {
let $ = cheerio.load(body);
let content = $('#content');
let avArray = [];
let video = content.find('.thumb-block');
if (video[0] === undefined) throw new Error('no content');
for (let i = 0; i < video.length; i++) {
let obj = {
page: page,
attr: {
index: i,
name: video.eq(i).children('div.thumb-under').find('a').text().substr(0, 30),
link: video.eq(i).children('div.thumb-under').find('a').attr('href')
}
};
avArray.push(obj);
}
return avArray;
};
const parseTag = body => {
let $ = cheerio.load(body);
let tagTable = $('.video-tags-list');
let tag = [];
let element = tagTable.find('li');
if (element[1] === undefined) throw new Error('no tag');
for (let i = 0; i < element.length - 1; i++) {
let text = element.eq(i).children('a').text();
if (text.charAt(0) != '\n') tag.push(text);
}
tag = tag.slice(0, 5); //keep only the first five tags
return tag;
};
exports.tagCrawler = (() => {
var _ref = _asyncToGenerator(function* (path) {
let body = yield req(host + path);
let taglist = parseTag(body);
return taglist;
});
return function (_x) {
return _ref.apply(this, arguments);
};
})();
exports.homepageCrawler = (() => {
var _ref2 = _asyncToGenerator(function* (page) {
let body = yield req(homepageUrl(page));
let avlist = parseVideo(body, page);
return avlist;
});
return function (_x2) {
return _ref2.apply(this, arguments);
};
})();
exports.keywordCrawler = (() => {
var _ref3 = _asyncToGenerator(function* (keyword, page) {
let body = yield req(keywordUrl(keyword, page));
let avlist = parseVideo(body, page);
return avlist;
});
return function (_x3, _x4) {
return _ref3.apply(this, arguments);
};
})();
<|start_filename|>src/lib/ctrl.js<|end_filename|>
const chalk = require('chalk');
let xvideo = require('./ReqParse.js');
const RESET = ()=> process.stdout.write('\x1bc');
const BOTTOM = `==========================================================
"right" : See the video tag
"space" : Add favorite
"enter" : Watch the video`;
class Page{
constructor(){
this.page = 0;//initial
this.videoList = [];
this.pointer = 0;
this.index = 0;
this.loadingState = 0;
}
async nextPage(){
try{
let temp = await xvideo.homepageCrawler(this.page);
this.videoList = this.videoList.concat(temp);
this.page+=1;
}catch(err){
console.log('No more home page :(');
throw new Error('no content');
}
};
renderTen(){
try{
RESET();
for(let i=this.index;i<this.index+10;i++){
if(i==this.pointer){
console.log(`-> ${chalk.red.bold(this.videoList[i].attr.name)}`)
if(this.videoList[i].tag!==undefined ){
if(this.videoList[i].tag[0]!='W'){
for(let j=0;j<this.videoList[i].tag.length;j++){
console.log(` --> ${chalk.bgGreen.white(this.videoList[i].tag[j])}`);
}
}else{
console.log(` --> ${chalk.bgYellow.white(this.videoList[i].tag)}`);
}
}
}
else console.log(` ${this.videoList[i].attr.name}`);
}
console.log(BOTTOM);
}catch(err){
RESET()
console.log('loading...')
if(this.loadingState==0){
this.nextPage().then(()=>{
this.renderTen()
this.loadingState=0;
},err=>{console.log(err)});
this.loadingState=1;
}
}
}
down(){
this.pointer+=1;
if(Math.floor(this.pointer/10)> Math.floor((this.pointer-1)/10)) this.index+=10;
//console.log(obj.index);
this.renderTen();
}
up(){
if(this.pointer==0){
this.renderTen();
return;
}
this.pointer-=1;
if(Math.floor(this.pointer/10)< Math.floor((this.pointer+1)/10)) this.index-=10;
this.renderTen();
}
async right(){
try{
if(this.videoList[this.pointer].tag===undefined){
this.videoList[this.pointer].tag = 'Waiting...'
this.renderTen();
let tag = await xvideo.tagCrawler(this.videoList[this.pointer].attr.link);
this.videoList[this.pointer].tag = tag;
this.renderTen();
}
}catch(err){
this.videoList[this.pointer].tag = 'No Tag'
this.renderTen();
}
};
async save(){
if(this.videoList[this.pointer].tag===undefined){
this.videoList[this.pointer].tag = 'Waiting...'
this.renderTen();
let tag = await xvideo.tagCrawler(this.videoList[this.pointer].attr.link);
this.videoList[this.pointer].tag = tag;
}
const fav = require('./favjson.js');
fav.addjson(this.videoList[this.pointer]);
this.renderTen();
console.log(chalk.bgGreen.white('Save to favorite Success!'));
}
open(){
const opn = require('openurl');
if(this.videoList[this.pointer]!==undefined){
const host = 'https://www.xvideos.com'
let videoUrl = host+this.videoList[this.pointer].attr.link;
opn.open(videoUrl);
}
}
//default return this
}
exports.homepage = new Page();
exports.keypage = class Keypage extends Page{
constructor(key){
super();
this.key = key;
}
async nextPage(){
try{
let temp = await xvideo.keywordCrawler(this.key, this.page);
this.videoList = this.videoList.concat(temp);
this.page+=1;
}catch(err){
console.log('No videos found for that keyword... sorry :(')
throw new Error('no content');
}
};
}
exports.favpage = class Favpage extends Page{
constructor(){
super();
const favorite = require('./favjson.js');
this.videoList = favorite.readjson();
}
down(){
this.pointer==this.videoList.length-1?this.pointer=this.videoList.length-1:this.pointer+=1;
if(Math.floor(this.pointer/10)> Math.floor((this.pointer-1)/10)) this.index+=10;
//console.log(obj.index);
this.renderTen();
}
delete(){
RESET();
try{
const fav = require('./favjson.js');
for(let i=this.index;i<this.index+10;i++){
if(this.videoList[i]==undefined) break;
if(i==this.pointer) console.log('-> Delete')
else console.log(`   ${this.videoList[i].attr.name}`);
}
this.videoList.splice(this.pointer,1);
fav.cleanjson();
for(let i=0;i<this.videoList.length;i++){
fav.addjson(this.videoList[i]);
}
}catch(err){
console.log(err);
console.log('Nothing in your fav list, press "<-" back to menu')
}
}
right() {
RESET();
try{
for(let i=this.index;i<this.index+10;i++){
if(this.videoList[i]==undefined) break;
if(i==this.pointer){
console.log(`-> ${chalk.red.bold(this.videoList[i].attr.name)}`);
if(this.videoList[i].tag!==undefined){
for(let j=0;j<this.videoList[i].tag.length;j++){
console.log(` -->${chalk.bgGreen.white(this.videoList[i].tag[j])}`);
}
}
}
else console.log(` ${this.videoList[i].attr.name}`);
}
console.log(BOTTOM)
}catch(err){
console.log('press "<-" back to menu')
}
}
renderTen(){
RESET();
try{
for(let i=this.index;i<this.index+10;i++){
if(this.videoList[i]==undefined) break;
if(i==this.pointer) console.log(`-> ${chalk.red.bold(this.videoList[i].attr.name)}`);
else console.log(` ${this.videoList[i].attr.name}`);
}
console.log(BOTTOM);
}catch(err){
console.log(err)
console.log('Nothing in your fav list, press "<-" back to menu')
}
}
}
<|start_filename|>src/lib/menu.js<|end_filename|>
const keypress = require('keypress');
const readline = require('readline');
const ctrl = require('./ctrl.js');
let home = ctrl.homepage;
function hchoose(ch,key){
switch(key.name){
case 'up':
home.up();
break;
case 'down':
home.down();
break;
case 'left':
process.stdin.removeListener('keypress',hchoose);
const _menu = require('./menu.js');
_menu();
break;
case 'right':
home.right()
break;
case 'space':
home.save();
break;
case 'return':
home.open();
break;
}
}
function fchoose(ch,key){
let fav = new ctrl.favpage();
switch(key.name){
case 'up':
fav.up()
break;
case 'down':
fav.down();
break;
case 'left':
process.stdin.removeListener('keypress',fchoose);
const _menu = require('./menu.js');
_menu();
break;
case 'right':
fav.right();
break;
case 'd':
fav.delete();
break;
case 'return':
fav.open();
}
}
module.exports = function(){
let keyword = new ctrl.keypage('');
function kchoose(ch,key){
switch(key.name){
case 'up':
keyword.up();
break;
case 'down':
keyword.down();
break;
case 'left':
process.stdin.removeListener('keypress',kchoose);
const _menu = require('./menu.js');
_menu();
break;
case 'right':
keyword.right();
break;
case 'space':
keyword.save();
break;
case 'return':
keyword.open();
}
}
process.stdout.write('\x1bc');
const inquirer = require('inquirer');
inquirer.prompt([{
type:'list',
name:'choice',
message:'What do you want?',
choices:[
new inquirer.Separator(),
{name:'Watch the home page',value:'home'},
{name:'Input your keyword to find',value:'keyword'},
{name:'Favorite',value:'favorite'},
new inquirer.Separator(),
{name:'Exit',value:'exit'}]
}]).then(async function(answer){
switch(answer.choice){
case "home":
keypress(process.stdin);
process.stdin.on('keypress',hchoose);
process.stdin.setRawMode(true);
process.stdin.resume();
home.renderTen();
break;
case "keyword":
if(keyword.key!==''){
keypress(process.stdin);
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on('keypress',kchoose);
keyword.renderTen();
}
else{
process.stdin.setRawMode(false);
let rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
console.log('Give me keyword: ')
rl.on('line',answer=>{
keyword.key = answer;
rl.close();
})
rl.on('close',function(){
if(keyword.key===''){
console.log('keyword empty');
process.exit();
}
keypress(process.stdin);
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on('keypress',(ch,key)=>{
if(key && key.ctrl &&key.name=='c'){
process.stdin.pause();
}
})
process.stdin.on('keypress',kchoose);
keyword.renderTen();
})
}
break;
case "favorite":
let fav = new ctrl.favpage();
keypress(process.stdin);
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on('keypress',fchoose);
fav.renderTen();
//Favorite page listener
break;
case "exit":
process.stdout.write('\x1bc');
console.log('Good bye')
process.exit();
break;
default:
break;
}
})
}
<|start_filename|>dist/lib/menu.js<|end_filename|>
function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
const keypress = require('keypress');
const readline = require('readline');
const ctrl = require('./ctrl.js');
let home = ctrl.homepage;
function hchoose(ch, key) {
switch (key.name) {
case 'up':
home.up();
break;
case 'down':
home.down();
break;
case 'left':
process.stdin.removeListener('keypress', hchoose);
const _menu = require('./menu.js');
_menu();
break;
case 'right':
home.right();
break;
case 'space':
home.save();
break;
case 'return':
home.open();
break;
}
}
function fchoose(ch, key) {
let fav = new ctrl.favpage();
switch (key.name) {
case 'up':
fav.up();
break;
case 'down':
fav.down();
break;
case 'left':
process.stdin.removeListener('keypress', fchoose);
const _menu = require('./menu.js');
_menu();
break;
case 'right':
fav.right();
break;
case 'd':
fav.delete();
break;
case 'return':
fav.open();
}
}
module.exports = function () {
let keyword = new ctrl.keypage('');
function kchoose(ch, key) {
switch (key.name) {
case 'up':
keyword.up();
break;
case 'down':
keyword.down();
break;
case 'left':
process.stdin.removeListener('keypress', kchoose);
const _menu = require('./menu.js');
_menu();
break;
case 'right':
keyword.right();
break;
case 'space':
keyword.save();
break;
case 'return':
keyword.open();
}
}
process.stdout.write('\x1bc');
const inquirer = require('inquirer');
inquirer.prompt([{
type: 'list',
name: 'choice',
message: 'What do you want?',
choices: [new inquirer.Separator(), { name: 'Watch the home page', value: 'home' }, { name: 'Input your keyword to find', value: 'keyword' }, { name: 'Favorite', value: 'favorite' }, new inquirer.Separator(), { name: 'Exit', value: 'exit' }]
}]).then((() => {
var _ref = _asyncToGenerator(function* (answer) {
switch (answer.choice) {
case "home":
keypress(process.stdin);
process.stdin.on('keypress', hchoose);
process.stdin.setRawMode(true);
process.stdin.resume();
home.renderTen();
break;
case "keyword":
if (keyword.key !== '') {
keypress(process.stdin);
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on('keypress', kchoose);
keyword.renderTen();
} else {
process.stdin.setRawMode(false);
let rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
console.log('Give me keyword: ');
rl.on('line', function (answer) {
keyword.key = answer;
rl.close();
});
rl.on('close', function () {
if (keyword.key === '') {
console.log('keyword empty');
process.exit();
}
keypress(process.stdin);
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on('keypress', (ch, key) => {
if (key && key.ctrl && key.name == 'c') {
process.stdin.pause();
}
});
process.stdin.on('keypress', kchoose);
keyword.renderTen();
});
}
break;
case "favorite":
let fav = new ctrl.favpage();
keypress(process.stdin);
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on('keypress', fchoose);
fav.renderTen();
//Favorite page listener
break;
case "exit":
process.stdout.write('\x1bc');
console.log('Good bye');
process.exit();
break;
default:
break;
}
});
return function (_x) {
return _ref.apply(this, arguments);
};
})());
};
<|start_filename|>test/menu_test.js<|end_filename|>
var expect = require('chai').expect
var menu = require('../lib/lib/menu');
describe('[lib] Menu test',()=>{
beforeEach(()=>{
process.stdout.write = ()=>{};
})
afterEach(()=>{
process.stdin.pause();
delete process.stdout.write;
})
it('runs the menu without throwing',()=>{
expect(()=>menu()).to.not.throw();
})
})
<|start_filename|>src/index.js<|end_filename|>
#!/usr/bin/env node
const chalk = require('chalk');
const RESET = ()=> process.stdout.write('\x1bc');
const WELCOME = `===============================
| |
| Welcome to ${chalk.bgWhite.red.bold('Xvideo.js')} |
| |
===============================`;
RESET();
console.log(WELCOME);
const inquirer = require('inquirer');
inquirer.prompt([{
type:'confirm',
name:'NSFW',
message:'Are you 18?'
}]).then(ans=>{
if(ans.NSFW){
const _menu = require('./lib/menu.js')
_menu();
}else{
RESET();
console.log('okay goodbye');
process.stdin.pause();
}
})
/*arg parse
if(process.argv[2]=='-k'){
if(process.argv[3]!=undefined){
const keypress = require('keypress')
let lis = require('./xvideo/listen.js');
lis.keywords.keyword = process.argv[3];
keypress(process.stdin);
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on('keypress',(ch,key)=>{
if(key && key.ctrl &&key.name=='c'){
console.reset();
process.stdin.pause();
}
})
process.stdin.on('keypress',lis.kchoose);
lis.keywords.renderTen();
}
}else{
*/
//need global listen key "q"
<|start_filename|>dist/lib/favjson.js<|end_filename|>
const fs = require('fs');
exports.addjson = function addJSONsync(something) {
let obj;
try {
obj = fs.readFileSync('./fav.json');
obj = JSON.parse(obj);
} catch (err) {
//not exist
obj = [];
}
obj.push(something);
fs.writeFileSync('./fav.json', JSON.stringify(obj));
};
exports.readjson = function readJSONsync() {
let obj;
try {
obj = fs.readFileSync('./fav.json');
obj = JSON.parse(obj);
} catch (err) {
obj = [];
}
return obj;
};
exports.cleanjson = function cleanJSONsync() {
let obj = [];
fs.writeFileSync('./fav.json', JSON.stringify(obj));
};
<|start_filename|>dist/lib/ctrl.js<|end_filename|>
function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }
const chalk = require('chalk');
let xvideo = require('./ReqParse.js');
const RESET = () => process.stdout.write('\x1bc');
const BOTTOM = `==========================================================
"right" : See the video tag
"space" : Add favorite
"enter" : Watch the video`;
class Page {
constructor() {
this.page = 0; //initial
this.videoList = [];
this.pointer = 0;
this.index = 0;
this.loadingState = 0;
}
nextPage() {
var _this = this;
return _asyncToGenerator(function* () {
try {
let temp = yield xvideo.homepageCrawler(_this.page);
_this.videoList = _this.videoList.concat(temp);
_this.page += 1;
} catch (err) {
console.log('No more home page :(');
throw new Error('no content');
}
})();
}
renderTen() {
try {
RESET();
for (let i = this.index; i < this.index + 10; i++) {
if (i == this.pointer) {
console.log(`-> ${chalk.red.bold(this.videoList[i].attr.name)}`);
if (this.videoList[i].tag !== undefined) {
if (this.videoList[i].tag[0] != 'W') {
for (let j = 0; j < this.videoList[i].tag.length; j++) {
console.log(` --> ${chalk.bgGreen.white(this.videoList[i].tag[j])}`);
}
} else {
console.log(` --> ${chalk.bgYellow.white(this.videoList[i].tag)}`);
}
}
} else console.log(` ${this.videoList[i].attr.name}`);
}
console.log(BOTTOM);
} catch (err) {
RESET();
console.log('loading...');
if (this.loadingState == 0) {
this.nextPage().then(() => {
this.renderTen();
this.loadingState = 0;
}, err => {
console.log(err);
});
this.loadingState = 1;
}
}
}
down() {
this.pointer += 1;
if (Math.floor(this.pointer / 10) > Math.floor((this.pointer - 1) / 10)) this.index += 10;
//console.log(obj.index);
this.renderTen();
}
up() {
if (this.pointer == 0) {
this.renderTen();
return;
}
this.pointer -= 1;
if (Math.floor(this.pointer / 10) < Math.floor((this.pointer + 1) / 10)) this.index -= 10;
this.renderTen();
}
right() {
var _this2 = this;
return _asyncToGenerator(function* () {
try {
if (_this2.videoList[_this2.pointer].tag === undefined) {
_this2.videoList[_this2.pointer].tag = 'Waiting...';
_this2.renderTen();
let tag = yield xvideo.tagCrawler(_this2.videoList[_this2.pointer].attr.link);
_this2.videoList[_this2.pointer].tag = tag;
_this2.renderTen();
}
} catch (err) {
_this2.videoList[_this2.pointer].tag = 'No Tag';
_this2.renderTen();
}
})();
}
save() {
var _this3 = this;
return _asyncToGenerator(function* () {
if (_this3.videoList[_this3.pointer].tag === undefined) {
_this3.videoList[_this3.pointer].tag = 'Waiting...';
_this3.renderTen();
let tag = yield xvideo.tagCrawler(_this3.videoList[_this3.pointer].attr.link);
_this3.videoList[_this3.pointer].tag = tag;
}
const fav = require('./favjson.js');
fav.addjson(_this3.videoList[_this3.pointer]);
_this3.renderTen();
console.log(chalk.bgGreen.white('Save to favorite Success!'));
})();
}
open() {
const opn = require('openurl');
if (this.videoList[this.pointer] !== undefined) {
const host = 'https://www.xvideos.com';
let videoUrl = host + this.videoList[this.pointer].attr.link;
opn.open(videoUrl);
}
}
//default return this
}
exports.homepage = new Page();
exports.keypage = class Keypage extends Page {
constructor(key) {
super();
this.key = key;
}
nextPage() {
var _this4 = this;
return _asyncToGenerator(function* () {
try {
let temp = yield xvideo.keywordCrawler(_this4.key, _this4.page);
_this4.videoList = _this4.videoList.concat(temp);
_this4.page += 1;
} catch (err) {
console.log('No videos found for that keyword... sorry :(');
throw new Error('no content');
}
})();
}
};
exports.favpage = class Favpage extends Page {
constructor() {
super();
const favorite = require('./favjson.js');
this.videoList = favorite.readjson();
}
down() {
this.pointer == this.videoList.length - 1 ? this.pointer = this.videoList.length - 1 : this.pointer += 1;
if (Math.floor(this.pointer / 10) > Math.floor((this.pointer - 1) / 10)) this.index += 10;
//console.log(obj.index);
this.renderTen();
}
delete() {
RESET();
try {
const fav = require('./favjson.js');
for (let i = this.index; i < this.index + 10; i++) {
if (this.videoList[i] == undefined) break;
if (i == this.pointer) console.log('-> Delete');else console.log(`   ${this.videoList[i].attr.name}`);
}
this.videoList.splice(this.pointer, 1);
fav.cleanjson();
for (let i = 0; i < this.videoList.length; i++) {
fav.addjson(this.videoList[i]);
}
} catch (err) {
console.log(err);
console.log('Nothing in your fav list, press "<-" back to menu');
}
}
right() {
RESET();
try {
for (let i = this.index; i < this.index + 10; i++) {
if (this.videoList[i] == undefined) break;
if (i == this.pointer) {
console.log(`-> ${chalk.red.bold(this.videoList[i].attr.name)}`);
if (this.videoList[i].tag !== undefined) {
for (let j = 0; j < this.videoList[i].tag.length; j++) {
console.log(` -->${chalk.bgGreen.white(this.videoList[i].tag[j])}`);
}
}
} else console.log(` ${this.videoList[i].attr.name}`);
}
console.log(BOTTOM);
} catch (err) {
console.log('press "<-" back to menu');
}
}
renderTen() {
RESET();
try {
for (let i = this.index; i < this.index + 10; i++) {
if (this.videoList[i] == undefined) break;
if (i == this.pointer) console.log(`-> ${chalk.red.bold(this.videoList[i].attr.name)}`);else console.log(` ${this.videoList[i].attr.name}`);
}
console.log(BOTTOM);
} catch (err) {
console.log(err);
console.log('Nothing in your fav list, press "<-" back to menu');
}
}
};
| realdennis/xvideo-js
<|start_filename|>test/index.js<|end_filename|>
const tester = require("tester")
, match = require("../lib")
;
tester.describe("run the tests", test => {
var s = "Hello _World_ and _Mars_";
test.it("match all", () => {
test.expect(match(s, /_([a-z]+)_/gi).toArray()).toEqual(["World", "Mars"]);
});
test.it("get matches one by one", () => {
var m = match(s, /_([a-z]+)_/gi);
test.expect(m.next()).toBe("World");
test.expect(m.next()).toBe("Mars");
test.expect(m.next()).toBe(null);
});
test.it("reset the index", () => {
var m = match("Hello _World_, _Mars_, _Pluto_ and _Moon_!", /_([a-z]+)_/gi);
test.expect(m.next()).toBe("World");
m.reset();
test.expect(m.next()).toBe("World");
test.expect(m.next()).toBe("Mars");
m.reset(20);
test.expect(m.next()).toBe("Pluto");
});
test.it("alternate regex", () => {
var m = match("Hello World and Mars", /(World)|(Mars)/gi);
test.expect(m.next()).toBe("World");
test.expect(m.next()).toBe("Mars");
m.reset();
test.expect(m.toArray()).toEqual(["World", "Mars"])
});
});
<|start_filename|>example/index.js<|end_filename|>
"use strict";
const matchAll = require("../lib");
let s = "Hello _World_ and _Mars_";
console.log(matchAll(s, /_([a-z]+)_/gi).toArray());
// => [ "World", "Mars" ]
// Get matches one by one
let m = matchAll(s, /_([a-z]+)_/gi);
console.log(m.next());
// => "World"
console.log(m.next());
// => "Mars"
console.log(m.next());
// => null
let multipleGroup = "Hello _World001_ and _World002_";
console.log(matchAll(multipleGroup, /_([a-z]+([0-9]+))_/gi).toArray());
// => [ "World", "Mars" ]
// Get matches one by one
let multipleMatch = matchAll(multipleGroup, /_([a-z]+([0-9]+))_/gi);
console.log(multipleMatch.nextRaw());
// => [..., "World001", "001"]
console.log(multipleMatch.nextRaw());
// => [..., "World002", "002"]
let m2 = matchAll("Hello _World_, _Mars_, _Pluto_ and _Moon_!", /_([a-z]+)_/gi);
console.log(m2.next());
// => "World"
// Reset the match index
m2.reset();
console.log(m2.next());
// => "World"
console.log(m2.next());
// => "Mars"
// Set the match index to 20
m2.reset(20);
console.log(m2.next());
// => "Pluto"
<|start_filename|>lib/index.js<|end_filename|>
/**
* matchAll
* Get all the matches for a regular expression in a string.
*
* @name matchAll
* @function
* @param {String} s The input string.
* @param {RegExp} r The regular expression.
* @return {Object} An object containing the following fields:
*
* - `input` (String): The input string.
* - `regex` (RegExp): The regular expression.
* - `next` (Function): Get the next match.
* - `toArray` (Function): Get all the matches.
* - `reset` (Function): Reset the index.
*/
module.exports = function matchAll (s, r) {
return {
input: s
, regex: r
/**
* next
* Get the next match, returning the first non-empty capture group.
*
* @name next
* @function
* @return {String|null} The matched snippet.
*/
, next () {
let c = this.nextRaw()
if (c) {
for (let i = 1; i < c.length; i++) {
if (c[i]) {
return c[i]
}
}
}
return null
}
/**
* nextRaw
* Get the next match as the raw regex output. Useful for accessing other capture groups.
*
* @name nextRaw
* @function
* @returns {Array|null} The matched snippet
*/
, nextRaw () {
let c = this.regex.exec(this.input)
return c
}
/**
* toArray
* Get all the matches.
*
* @name toArray
* @function
* @return {Array} The matched snippets.
*/
, toArray () {
let res = []
, c = null
while (c = this.next()) {
res.push(c)
}
return res
}
/**
* reset
* Reset the index.
*
* @name reset
* @function
* @param {Number} i The new index (default: `0`).
* @return {Number} The new index.
*/
, reset (i) {
return this.regex.lastIndex = i || 0
}
}
};
| IonicaBizau/match-all |
<|start_filename|>baremaps-postgres/src/main/java/com/baremaps/postgres/jdbc/PostgresUtils.java<|end_filename|>
/*
* Copyright (C) 2020 The Baremaps Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.baremaps.postgres.jdbc;
import com.google.common.io.Resources;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;
/** A helper class for creating data sources and executing queries. */
public final class PostgresUtils {
private PostgresUtils() {}
/**
* Creates a data source from a JDBC url with a pool size corresponding to the number of available
* processors.
*
* @param url the JDBC url
* @return the data source
*/
public static DataSource datasource(String url) {
return datasource(url, Runtime.getRuntime().availableProcessors());
}
/**
* Creates a data source from a JDBC url with a pool size defined by the user.
*
* @param url the JDBC url
* @param poolSize the pool size
* @return the data source
*/
public static DataSource datasource(String url, int poolSize) {
if (poolSize < 1) {
throw new IllegalArgumentException("PoolSize cannot be inferior to 1");
}
HikariConfig config = new HikariConfig();
config.setPoolName("BaremapsDataSource");
config.setJdbcUrl(url);
config.setMaximumPoolSize(poolSize);
return new HikariDataSource(config);
}
/**
* Executes the queries contained in a resource file.
*
* @param connection the JDBC connection
* @param resource the path of the resource file
* @throws IOException if the resource cannot be read
* @throws SQLException if a query fails to execute
*/
public static void executeResource(Connection connection, String resource)
throws IOException, SQLException {
URL resourceURL = Resources.getResource(resource);
String queries = Resources.toString(resourceURL, StandardCharsets.UTF_8);
try (Statement statement = connection.createStatement()) {
statement.execute(queries);
}
}
}
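/*
 * A minimal usage sketch (hypothetical values, not part of the Baremaps
 * sources): open a pooled connection and run a bundled SQL resource.
 *
 *   DataSource dataSource =
 *       PostgresUtils.datasource("jdbc:postgresql://localhost:5432/baremaps", 8);
 *   try (Connection connection = dataSource.getConnection()) {
 *     PostgresUtils.executeResource(connection, "queries/osm_create_tables.sql");
 *   }
 */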
<|start_filename|>baremaps-stream/src/main/java/com/baremaps/stream/BufferedSpliterator.java<|end_filename|>
/*
* Copyright (C) 2020 The Baremaps Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.baremaps.stream;
import java.util.Spliterator;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
/**
* A spliterator that buffers the completion of a spliterator of future elements and returns them
* according to a user defined order.
*
* <p>This code has been adapted from {@link <a
* href="https://github.com/palantir/streams/">streams</a>} licensed under the Apache License 2.0.
*
* <p>Copyright 2017 Palantir Technologies, Inc. All rights reserved.
*
* @param <T> the type of elements returned by this {@code Spliterator}
*/
class BufferedSpliterator<T> implements Spliterator<CompletableFuture<T>> {
private final CompletionOrder completionOrder;
private final Spliterator<CompletableFuture<T>> spliterator;
private final int bufferSize;
private final BlockingQueue<CompletableFuture<T>> buffer;
private int pending = 0;
/**
* Constructs a {@code BufferedSpliterator} from a spliterator of futures elements.
*
* @param spliterator the spliterator to buffer
* @param bufferSize the buffer size
* @param completionOrder the completion order
*/
public BufferedSpliterator(
Spliterator<CompletableFuture<T>> spliterator,
int bufferSize,
CompletionOrder completionOrder) {
this.spliterator = spliterator;
this.bufferSize = bufferSize;
this.buffer = new ArrayBlockingQueue<>(bufferSize);
this.completionOrder = completionOrder;
}
/** {@inheritDoc} */
@Override
public boolean tryAdvance(Consumer<? super CompletableFuture<T>> action) {
fillBuffer();
if (pending == 0) {
return false;
}
try {
CompletableFuture<T> future = buffer.take();
pending--;
action.accept(future);
return true;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new StreamException(e);
}
}
/** {@inheritDoc} */
@Override
public Spliterator<CompletableFuture<T>> trySplit() {
return null;
}
/** {@inheritDoc} */
@Override
public long estimateSize() {
long estimate = pending + spliterator.estimateSize();
if (estimate < 0) {
return Long.MAX_VALUE;
}
return estimate;
}
/** {@inheritDoc} */
@Override
public int characteristics() {
return spliterator.characteristics();
}
private void fillBuffer() {
while (pending < bufferSize
&& spliterator.tryAdvance(
future -> completionOrder.registerCompletion(future, buffer::add))) {
pending++;
}
}
/** Represents the completion order applied to a {@code BufferedSpliterator}. */
public interface CompletionOrder {
<T> void registerCompletion(
CompletableFuture<T> future, Consumer<CompletableFuture<T>> resultConsumer);
}
/** An order that registers completions when futures are completed. */
enum InCompletionOrder implements CompletionOrder {
INSTANCE;
@Override
public <T> void registerCompletion(
CompletableFuture<T> future, Consumer<CompletableFuture<T>> resultConsumer) {
future.thenAccept(result -> resultConsumer.accept(future));
}
}
/** An order that registers completion according to the order of the source. */
enum InSourceOrder implements CompletionOrder {
INSTANCE;
@Override
public <T> void registerCompletion(
CompletableFuture<T> future, Consumer<CompletableFuture<T>> resultConsumer) {
resultConsumer.accept(future);
}
}
}
<|start_filename|>baremaps-tile/src/main/java/com/baremaps/tile/TileBatchPredicate.java<|end_filename|>
/*
* Copyright (C) 2020 The Baremaps Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.baremaps.tile;
import java.util.function.Predicate;
/** A predicate that filters tiles according to the index of a batch. */
public class TileBatchPredicate implements Predicate<Tile> {
private final int batchArraySize;
private final int batchArrayIndex;
/**
* Constructs a {@code TileBatchPredicate}.
*
* @param batchArraySize the size of the batch array
* @param batchArrayIndex the index of the batch in the array
*/
public TileBatchPredicate(int batchArraySize, int batchArrayIndex) {
this.batchArraySize = batchArraySize;
this.batchArrayIndex = batchArrayIndex;
}
/**
* Returns true if the tile belongs to the current batch.
*
* @param tile the tile
* @return the result
*/
@Override
public boolean test(Tile tile) {
return batchArraySize <= 1 || tile.index() % batchArraySize == batchArrayIndex;
}
}
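/*
 * A usage sketch (hypothetical): split tile generation across four workers
 * and keep only the tiles assigned to worker 2.
 *
 *   Stream<Tile> batch = tiles.filter(new TileBatchPredicate(4, 2));
 */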
<|start_filename|>baremaps-stream/src/main/java/com/baremaps/stream/ThrowingFunction.java<|end_filename|>
/*
* Copyright (C) 2020 The Baremaps Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.baremaps.stream;
import static java.util.Objects.requireNonNull;
import java.util.Optional;
import java.util.function.Function;
/**
* Represents a function that accepts one argument, produces a result or throws an exception
*
* @param <T> the type of the input to the function
* @param <R> the type of the result of the function
* @param <E> the type of the exception thrown by the function
*/
@FunctionalInterface
public interface ThrowingFunction<T, R, E extends Exception> {
/**
* Applies this function to the given argument.
*
* @param t the function argument
* @return the function result
* @throws E an exception
*/
R apply(T t) throws E;
/**
* Converts a {@code ThrowingFunction} into a {@code Function} that returns {@code Optional}
* elements which are empty in case of {@code Exception}.
*
* @param throwingFunction the throwing function
* @param <T> the type of the input to the function
* @param <R> the type of the result of the function
* @return the resulting function
*/
static <T, R> Function<T, Optional<R>> optional(
final ThrowingFunction<? super T, ? extends R, ?> throwingFunction) {
requireNonNull(throwingFunction);
return t -> {
try {
return Optional.ofNullable(throwingFunction.apply(t));
} catch (final Exception e) {
return Optional.empty();
}
};
}
/**
* Converts a {@code ThrowingFunction} into a {@code Function} that returns elements or throws
* unchecked exceptions in case of {@code Exception}.
*
* @param throwingFunction the throwing function
* @param <T> the type of the input to the function
* @param <R> the type of the result of the function
* @return the resulting function
*/
static <T, R> Function<T, R> unchecked(
final ThrowingFunction<? super T, ? extends R, ?> throwingFunction) {
requireNonNull(throwingFunction);
return t -> {
try {
return throwingFunction.apply(t);
} catch (Exception e) {
throw new RuntimeException(e);
}
};
}
}
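/*
 * A usage sketch (hypothetical): parse a stream of strings, mapping the
 * unparseable ones to empty optionals instead of propagating the exception.
 *
 *   Stream<Optional<Integer>> parsed =
 *       Stream.of("1", "x", "3").map(ThrowingFunction.optional(Integer::parseInt));
 */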
<|start_filename|>baremaps-stream/src/main/java/com/baremaps/stream/SupplierUtils.java<|end_filename|>
/*
* Copyright (C) 2020 The Baremaps Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.baremaps.stream;
import java.util.function.Function;
import java.util.function.Supplier;
/** Utility methods for dealing with suppliers. */
public class SupplierUtils {
private SupplierUtils() {}
/**
* Returns a supplier that eagerly computes the result of another supplier once and caches it.
*
* @param supplier the original supplier
* @param <T> the type of element returned by the supplier
* @return the memoized supplier
*/
public static <T> Supplier<T> memoize(Supplier<T> supplier) {
T value = supplier.get();
return () -> value;
}
/**
* Returns a supplier that memoizes the result returned by another supplier for a user defined
* time to live.
*
* @param supplier the original supplier
* @param timeToLiveMillis the time to live in milliseconds
* @param <T> the type of element returned by the supplier
* @return the memoized supplier
*/
public static <T> Supplier<T> memoize(Supplier<T> supplier, int timeToLiveMillis) {
return new Supplier() {
long t1 = System.currentTimeMillis();
T value = supplier.get();
@Override
public Object get() {
long t2 = System.currentTimeMillis();
if (t2 - t1 > timeToLiveMillis) {
t1 = t2;
value = supplier.get();
}
return value;
}
};
}
/**
* Converts a supplier to another supplier by applying a function.
*
* @param supplier the original supplier
* @param function the function to apply
* @param <T> the type of elements returned by the original supplier
* @param <R> the type of elements returned by the function
* @return the resulting supplier
*/
public static <T, R> Supplier<R> convert(Supplier<T> supplier, Function<T, R> function) {
return () -> function.apply(supplier.get());
}
}
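/*
 * A usage sketch (hypothetical, Config and Config::load are invented names):
 * cache an expensive configuration lookup and refresh it at most once per
 * second.
 *
 *   Supplier<Config> config = SupplierUtils.memoize(Config::load, 1000);
 */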
<|start_filename|>baremaps-stream/src/main/java/com/baremaps/stream/StreamUtils.java<|end_filename|>
/*
* Copyright (C) 2020 The Baremaps Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.baremaps.stream;
import com.baremaps.stream.BufferedSpliterator.CompletionOrder;
import com.baremaps.stream.BufferedSpliterator.InCompletionOrder;
import com.baremaps.stream.BufferedSpliterator.InSourceOrder;
import java.util.Iterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/** Utility methods for creating parallel, buffered and batched streams of unknown size. */
public class StreamUtils {
/**
* Create an ordered sequential stream from an iterator of unknown size.
*
* @param iterator the source iterator
* @param <T> the type of the stream elements
* @return an ordered sequential stream
*/
public static <T> Stream<T> stream(Iterator<T> iterator) {
return StreamSupport.stream(
Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
}
/**
* Parallelize the provided stream of unknown size.
*
* @param stream
* @param <T>
* @return a parallel stream
*/
public static <T> Stream<T> batch(Stream<T> stream) {
return batch(stream, 1);
}
/**
* Parallelize the provided stream of unknown size and split it according to the batch size.
*
* @param stream
* @param batchSize
* @param <T>
* @return a parallel stream
*/
public static <T> Stream<T> batch(Stream<T> stream, int batchSize) {
return StreamSupport.stream(new BatchedSpliterator<T>(stream.spliterator(), batchSize), true);
}
/**
* Buffer the completion of the provided asynchronous stream according to a completion strategy
* and a buffer size.
*
* @param asyncStream
* @param completionOrder
* @param <T>
* @return a buffered stream
*/
private static <T> Stream<CompletableFuture<T>> buffer(
Stream<CompletableFuture<T>> asyncStream, CompletionOrder completionOrder) {
return buffer(asyncStream, completionOrder, Runtime.getRuntime().availableProcessors());
}
/**
* Buffer the completion of the provided asynchronous stream according to a completion strategy
* and a buffer size.
*
* @param asyncStream
* @param completionOrder
* @param bufferSize
* @param <T>
* @return a buffered stream
*/
private static <T> Stream<CompletableFuture<T>> buffer(
Stream<CompletableFuture<T>> asyncStream, CompletionOrder completionOrder, int bufferSize) {
return StreamSupport.stream(
new BufferedSpliterator<>(asyncStream.spliterator(), bufferSize, completionOrder),
asyncStream.isParallel());
}
/**
* Buffer the completion of the provided asynchronous stream according to a buffer size.
*
* @param asyncStream
* @param bufferSize
* @param <T>
* @return a buffered stream
*/
public static <T> Stream<CompletableFuture<T>> bufferInCompletionOrder(
Stream<CompletableFuture<T>> asyncStream, int bufferSize) {
return buffer(asyncStream, InCompletionOrder.INSTANCE, bufferSize);
}
/**
* Buffer the completion of the provided asynchronous stream according to a buffer size.
*
* @param asyncStream
* @param bufferSize
* @param <T>
* @return a buffered stream
*/
public static <T> Stream<CompletableFuture<T>> bufferInSourceOrder(
Stream<CompletableFuture<T>> asyncStream, int bufferSize) {
return buffer(asyncStream, InSourceOrder.INSTANCE, bufferSize);
}
/**
* Buffer the asynchronous mapping of the provided stream according to a buffer size.
*
* @param stream
* @param asyncMapper
* @param bufferSize
* @param <T>
* @return a buffered stream
*/
private static <T, U> Stream<U> buffer(
Stream<T> stream,
Function<T, U> asyncMapper,
CompletionOrder completionOrder,
int bufferSize) {
Stream<CompletableFuture<U>> asyncStream =
stream.map(t -> CompletableFuture.supplyAsync(() -> asyncMapper.apply(t)));
return buffer(asyncStream, completionOrder, bufferSize)
.map(
f -> {
try {
return f.get();
} catch (InterruptedException | ExecutionException e) {
Thread.currentThread().interrupt();
throw new StreamException(e);
}
});
}
/**
* Buffer the asynchronous mapping of the provided stream according to a buffer size.
*
* @param stream
* @param asyncMapper
* @param bufferSize
* @param <T>
* @return a buffered stream
*/
public static <T, U> Stream<U> bufferInCompletionOrder(
Stream<T> stream, Function<T, U> asyncMapper, int bufferSize) {
return buffer(stream, asyncMapper, InCompletionOrder.INSTANCE, bufferSize);
}
/**
* Buffer the asynchronous mapping of the provided stream according to a buffer size.
*
* @param stream
* @param asyncMapper
* @param bufferSize
* @param <T>
* @return a buffered stream
*/
public static <T, U> Stream<U> bufferInSourceOrder(
Stream<T> stream, Function<T, U> asyncMapper, int bufferSize) {
return buffer(stream, asyncMapper, InSourceOrder.INSTANCE, bufferSize);
}
/** Partition the provided stream according to a partition size. */
public static <T> Stream<Stream<T>> partition(Stream<T> stream, int partitionSize) {
return StreamSupport.stream(
new PartitionedSpliterator<T>(stream.spliterator(), partitionSize), stream.isParallel());
}
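  // Usage sketch (illustrative): partition a stream into sub-streams of
  // 1000 elements and count each partition.
  //
  //   partition(stream, 1000).forEach(chunk -> System.out.println(chunk.count()));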
}
| halset/baremaps |
<|start_filename|>example/lib/views/h2dart_page.dart<|end_filename|>
import 'package:flutter/material.dart';
import 'package:flutter_jscore_example/utils/h2dart.dart';
/// Converts a C header (.h) file to Dart bindings.
class H2DartPage extends StatefulWidget {
@override
State<StatefulWidget> createState() {
return H2DartPageState();
}
}
class H2DartPageState extends State<H2DartPage> {
late TextEditingController _hController;
late TextEditingController _dartController;
@override
void initState() {
_hController = TextEditingController();
_dartController = TextEditingController();
super.initState();
}
@override
void dispose() {
_hController.dispose();
_dartController.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text('.h to .dart'),
actions: <Widget>[
IconButton(
icon: Icon(Icons.autorenew),
onPressed: () {
_dartController.text = h2Dart(_hController.text);
},
),
],
),
body: ListView(
children: <Widget>[
Text('.h'),
TextField(
controller: _hController,
maxLines: 20,
style: TextStyle(
fontSize: 10.0,
),
),
Text('.dart'),
TextField(
controller: _dartController,
maxLines: 20,
style: TextStyle(
fontSize: 10.0,
),
),
],
),
);
}
}
<|start_filename|>example/lib/main.dart<|end_filename|>
import 'package:flutter/material.dart';
import 'package:flutter_jscore_example/views/h2dart_page.dart';
import 'package:flutter_jscore_example/views/bindings_page.dart';
import 'package:flutter_jscore_example/views/jscore_page.dart';
void main() => runApp(MyApp());
class MyApp extends StatefulWidget {
@override
_MyAppState createState() => _MyAppState();
}
class _MyAppState extends State<MyApp> {
@override
Widget build(BuildContext context) {
return MaterialApp(
title: 'flutter_jscore',
home: FlutterJsCorePage(),
);
}
}
class FlutterJsCorePage extends StatelessWidget {
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text('JavaScriptCore for Flutter'),
),
body: ListView(
children: <Widget>[
ListTile(
title: Text('H2Dart tool'),
            subtitle: Text('Convert a C .h file to a Dart file'),
onTap: () {
Navigator.of(context).push(MaterialPageRoute(builder: (context) {
return H2DartPage();
}));
},
),
Divider(
height: 0.5,
thickness: 0.5,
),
ListTile(
title: Text('Bindings page'),
subtitle: Text('Dart binding C'),
onTap: () {
Navigator.of(context).push(MaterialPageRoute(builder: (context) {
return BindingsPage();
}));
},
),
Divider(
height: 0.5,
thickness: 0.5,
),
ListTile(
title: Text('JsCore page'),
subtitle: Text('Use flutter_jscore lib'),
onTap: () {
Navigator.of(context).push(MaterialPageRoute(builder: (context) {
return JsCorePage();
}));
},
),
],
),
);
}
}
<|start_filename|>lib/jscore/js_property_name_array.dart<|end_filename|>
import 'dart:ffi';
import 'package:flutter_jscore/binding/js_object_ref.dart' as JSObjectRef;
import 'js_string.dart';
/// An array of JavaScript property names.
class JSPropertyNameArray {
/// C pointer
Pointer pointer;
JSPropertyNameArray(this.pointer);
/// Retains a JavaScript property name array.
void retain() {
pointer = JSObjectRef.jSPropertyNameArrayRetain(pointer);
}
/// Releases a JavaScript property name array.
void release() {
JSObjectRef.jSPropertyNameArrayRelease(pointer);
}
/// Gets a count of the number of items in a JavaScript property name array.
int get count {
return JSObjectRef.jSPropertyNameArrayGetCount(pointer);
}
/// Gets a property name at a given index in a JavaScript property name array.
/// [index] (size_t) The index of the property name to retrieve.
String propertyNameArrayGetNameAtIndex(int index) {
return JSString(
JSObjectRef.jSPropertyNameArrayGetNameAtIndex(pointer, index))
.string!;
}
}
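// Usage sketch (illustrative, not part of the library): iterate the
// enumerable property names of a JSObject obtained elsewhere as `object`.
//
//   final names = object.copyPropertyNames();
//   for (var i = 0; i < names.count; i++) {
//     print(names.propertyNameArrayGetNameAtIndex(i));
//   }
//   names.release();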
<|start_filename|>linux/include/flutter_jscore/flutter_jscore_plugin.h<|end_filename|>
#ifndef FLUTTER_PLUGIN_FLUTTER_JSCORE_PLUGIN_H_
#define FLUTTER_PLUGIN_FLUTTER_JSCORE_PLUGIN_H_
#include <flutter_linux/flutter_linux.h>
G_BEGIN_DECLS
#ifdef FLUTTER_PLUGIN_IMPL
#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default")))
#else
#define FLUTTER_PLUGIN_EXPORT
#endif
typedef struct _FlutterJscorePlugin FlutterJscorePlugin;
typedef struct {
GObjectClass parent_class;
} FlutterJscorePluginClass;
FLUTTER_PLUGIN_EXPORT GType flutter_jscore_plugin_get_type();
FLUTTER_PLUGIN_EXPORT void flutter_jscore_plugin_register_with_registrar(
FlPluginRegistrar* registrar);
G_END_DECLS
#endif // FLUTTER_PLUGIN_FLUTTER_JSCORE_PLUGIN_H_
<|start_filename|>example/lib/views/bindings_page.dart<|end_filename|>
import 'dart:ffi';
import 'dart:typed_data';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:flutter_jscore/jscore_bindings.dart';
import 'package:ffi/ffi.dart';
class BindingsPage extends StatefulWidget {
@override
State<StatefulWidget> createState() {
return BindingsPageState();
}
}
class BindingsPageState extends State<BindingsPage> {
  // Controller for the JavaScript input field
  late TextEditingController _jsInputController;
  // Evaluation result
  String? _result;
  // JavaScriptCore context
late Pointer contextGroup;
late Pointer globalContext;
late Pointer globalObject;
@override
void initState() {
super.initState();
    // Create the JavaScriptCore context
contextGroup = jSContextGroupCreate();
globalContext = jSGlobalContextCreateInGroup(contextGroup, nullptr);
globalObject = jSContextGetGlobalObject(globalContext);
    // Register the alert() function
_alertDartFunc = _alert;
Pointer<Utf8> funcNameCString = 'alert'.toNativeUtf8();
var functionObject = jSObjectMakeFunctionWithCallback(
globalContext,
jSStringCreateWithUTF8CString(funcNameCString),
Pointer.fromFunction(alert));
jSObjectSetProperty(
globalContext,
globalObject,
jSStringCreateWithUTF8CString(funcNameCString),
functionObject,
JSPropertyAttributes.kJSPropertyAttributeNone,
nullptr);
malloc.free(funcNameCString);
    // Register the flutter.print static function
_printDartFunc = _print;
var staticFunctions = JSStaticFunctionPointer.allocateArray([
JSStaticFunctionStruct(
name: 'print'.toNativeUtf8(),
callAsFunction: Pointer.fromFunction(flutterPrint),
attributes: JSPropertyAttributes.kJSPropertyAttributeNone,
),
]);
var definition = JSClassDefinitionPointer.allocate(
version: 0,
attributes: JSClassAttributes.kJSClassAttributeNone,
className: 'flutter'.toNativeUtf8(),
parentClass: null,
staticValues: null,
staticFunctions: staticFunctions,
initialize: null,
finalize: null,
hasProperty: null,
getProperty: null,
setProperty: null,
deleteProperty: null,
getPropertyNames: null,
callAsFunction: null,
callAsConstructor: null,
hasInstance: null,
convertToType: null,
);
var flutterJSClass = jSClassCreate(definition);
var flutterJSObject = jSObjectMake(globalContext, flutterJSClass, nullptr);
Pointer<Utf8> flutterCString = 'flutter'.toNativeUtf8();
jSObjectSetProperty(
globalContext,
globalObject,
jSStringCreateWithUTF8CString(flutterCString),
flutterJSObject,
JSPropertyAttributes.kJSPropertyAttributeDontDelete,
nullptr);
malloc.free(flutterCString);
    // Set the default JavaScript script
_jsInputController = TextEditingController(text: '''
function helloJsCore()
{
var years = 2000 + 20;
alert('Hello JavaScriptCore!', years);
flutter.print('Hello JavaScriptCore!');
return 'JSCore' + (2000 + 20);
}
helloJsCore();
''');
}
@override
void dispose() {
_jsInputController.dispose();
    // Release the JavaScriptCore context
jSContextGroupRelease(contextGroup);
super.dispose();
}
  /// Binds the JavaScript alert() function.
static Pointer alert(
Pointer ctx,
Pointer function,
Pointer thisObject,
int argumentCount,
Pointer<Pointer> arguments,
Pointer<Pointer> exception) {
if (_alertDartFunc != null) {
_alertDartFunc!(
ctx, function, thisObject, argumentCount, arguments, exception);
}
return nullptr;
}
static JSObjectCallAsFunctionCallbackDart? _alertDartFunc;
Pointer _alert(
Pointer ctx,
Pointer function,
Pointer thisObject,
int argumentCount,
Pointer<Pointer> arguments,
Pointer<Pointer> exception) {
String msg = 'No Message';
if (argumentCount != 0) {
msg = '';
for (int i = 0; i < argumentCount; i++) {
if (i != 0) {
msg += '\n';
}
var jsValueRef = arguments[i];
msg += _getJsValue(jsValueRef);
}
}
showDialog(
context: context,
builder: (context) {
return AlertDialog(
title: Text('Alert'),
content: Text(msg),
);
});
return nullptr;
}
  /// Binds the flutter.print() function.
static Pointer flutterPrint(
Pointer ctx,
Pointer function,
Pointer thisObject,
int argumentCount,
Pointer<Pointer> arguments,
Pointer<Pointer> exception) {
if (_printDartFunc != null) {
_printDartFunc!(
ctx, function, thisObject, argumentCount, arguments, exception);
}
return nullptr;
}
static JSObjectCallAsFunctionCallbackDart? _printDartFunc;
Pointer _print(
Pointer ctx,
Pointer function,
Pointer thisObject,
int argumentCount,
Pointer<Pointer> arguments,
Pointer<Pointer> exception) {
if (argumentCount > 0) {
print(_getJsValue(arguments[0]));
}
return nullptr;
}
  // Runs a JavaScript script and returns the result as a string
  String _runJs(String script) {
    // Evaluate the script
Pointer<Utf8> scriptCString = script.toNativeUtf8();
var jsValueRef = jSEvaluateScript(
globalContext,
jSStringCreateWithUTF8CString(scriptCString),
nullptr,
nullptr,
1,
nullptr);
malloc.free(scriptCString);
    // Convert the returned JSValueRef to a string
String result = _getJsValue(jsValueRef);
return result;
}
  // Gets the string value of a JSValueRef
String _getJsValue(Pointer jsValueRef) {
if (jSValueIsNull(globalContext, jsValueRef) == 1) {
return 'null';
} else if (jSValueIsUndefined(globalContext, jsValueRef) == 1) {
return 'undefined';
}
var resultJsString =
jSValueToStringCopy(globalContext, jsValueRef, nullptr);
var resultCString = jSStringGetCharactersPtr(resultJsString);
int resultCStringLength = jSStringGetLength(resultJsString);
if (resultCString == nullptr) {
return 'null';
}
    // asTypedList already exposes the UTF-16 code units backing the JSString.
    String result = String.fromCharCodes(
        resultCString.cast<Uint16>().asTypedList(resultCStringLength));
jSStringRelease(resultJsString);
return result;
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text('JavaScriptCore for Flutter'),
),
body: ListView(
padding: EdgeInsets.only(top: 10.0),
children: <Widget>[
Container(
margin: EdgeInsets.only(left: 10.0, right: 10.0),
child: Text('JavaScript:'),
),
Container(
margin: EdgeInsets.all(10.0),
padding: EdgeInsets.all(5.0),
decoration: BoxDecoration(
border:
Border.all(width: 1.0, color: Theme.of(context).primaryColor),
borderRadius: BorderRadius.all(new Radius.circular(5.0)),
),
child: TextField(
controller: _jsInputController,
maxLines: 30,
style: TextStyle(
fontSize: 12.0,
),
decoration: InputDecoration(
border: InputBorder.none,
),
),
),
Container(
margin: EdgeInsets.only(left: 10.0, right: 10.0),
child: Text('Result: ${_result ?? ''}'),
),
],
),
floatingActionButton: FloatingActionButton(
onPressed: () {
setState(() {
_result = _runJs(_jsInputController.text);
});
},
child: Icon(Icons.autorenew),
),
);
}
}
<|start_filename|>linux/flutter_jscore_plugin.cc<|end_filename|>
#include "include/flutter_jscore/flutter_jscore_plugin.h"
#include <flutter_linux/flutter_linux.h>
#include <gtk/gtk.h>
#include <sys/utsname.h>
#include <cstring>
#define FLUTTER_JSCORE_PLUGIN(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), flutter_jscore_plugin_get_type(), \
FlutterJscorePlugin))
struct _FlutterJscorePlugin {
GObject parent_instance;
};
G_DEFINE_TYPE(FlutterJscorePlugin, flutter_jscore_plugin, g_object_get_type())
// Called when a method call is received from Flutter.
static void flutter_jscore_plugin_handle_method_call(
FlutterJscorePlugin* self,
FlMethodCall* method_call) {
g_autoptr(FlMethodResponse) response = nullptr;
const gchar* method = fl_method_call_get_name(method_call);
if (strcmp(method, "getPlatformVersion") == 0) {
struct utsname uname_data = {};
uname(&uname_data);
g_autofree gchar *version = g_strdup_printf("Linux %s", uname_data.version);
g_autoptr(FlValue) result = fl_value_new_string(version);
response = FL_METHOD_RESPONSE(fl_method_success_response_new(result));
} else {
response = FL_METHOD_RESPONSE(fl_method_not_implemented_response_new());
}
fl_method_call_respond(method_call, response, nullptr);
}
static void flutter_jscore_plugin_dispose(GObject* object) {
G_OBJECT_CLASS(flutter_jscore_plugin_parent_class)->dispose(object);
}
static void flutter_jscore_plugin_class_init(FlutterJscorePluginClass* klass) {
G_OBJECT_CLASS(klass)->dispose = flutter_jscore_plugin_dispose;
}
static void flutter_jscore_plugin_init(FlutterJscorePlugin* self) {}
static void method_call_cb(FlMethodChannel* channel, FlMethodCall* method_call,
gpointer user_data) {
FlutterJscorePlugin* plugin = FLUTTER_JSCORE_PLUGIN(user_data);
flutter_jscore_plugin_handle_method_call(plugin, method_call);
}
void flutter_jscore_plugin_register_with_registrar(FlPluginRegistrar* registrar) {
FlutterJscorePlugin* plugin = FLUTTER_JSCORE_PLUGIN(
g_object_new(flutter_jscore_plugin_get_type(), nullptr));
g_autoptr(FlStandardMethodCodec) codec = fl_standard_method_codec_new();
g_autoptr(FlMethodChannel) channel =
fl_method_channel_new(fl_plugin_registrar_get_messenger(registrar),
"flutter_jscore",
FL_METHOD_CODEC(codec));
fl_method_channel_set_method_call_handler(channel, method_call_cb,
g_object_ref(plugin),
g_object_unref);
g_object_unref(plugin);
}
<|start_filename|>example/linux/flutter/generated_plugin_registrant.cc<|end_filename|>
//
// Generated file. Do not edit.
//
#include "generated_plugin_registrant.h"
#include <flutter_jscore/flutter_jscore_plugin.h>
void fl_register_plugins(FlPluginRegistry* registry) {
g_autoptr(FlPluginRegistrar) flutter_jscore_registrar =
fl_plugin_registry_get_registrar_for_plugin(registry, "FlutterJscorePlugin");
flutter_jscore_plugin_register_with_registrar(flutter_jscore_registrar);
}
<|start_filename|>lib/jscore/js_object.dart<|end_filename|>
import 'dart:ffi';
import 'package:ffi/ffi.dart';
import 'package:flutter_jscore/binding/js_base.dart' as JSBase;
import 'package:flutter_jscore/binding/js_object_ref.dart' as JSObjectRef;
import 'package:flutter_jscore/binding/js_typed_array.dart' as JSTypedArray;
import 'package:flutter_jscore/jscore/js_string.dart';
import 'js_class.dart';
import 'js_context.dart';
import 'js_property_name_accumulator.dart';
import 'js_property_name_array.dart';
import 'js_value.dart';
/// typedef JSObjectInitializeCallback
/// The callback invoked when an object is first created.
/// [ctx] The execution context to use.
/// [object] The JSObject being created.
/// If you named your function Initialize, you would declare it like this:
///
/// void Initialize(JSContextRef ctx, JSObjectRef object);
///
/// Unlike the other object callbacks, the initialize callback is called on the least
/// derived class (the parent class) first, and the most derived class last.
/// typedef void (*JSObjectInitializeCallback) (JSContextRef ctx, JSObjectRef object);
typedef JSObjectInitializeCallbackDart = void Function(
Pointer ctx, Pointer object);
/// typedef JSObjectFinalizeCallback
/// The callback invoked when an object is finalized (prepared for garbage collection). An object may be finalized on any thread.
/// [object] The JSObject being finalized.
/// If you named your function Finalize, you would declare it like this:
///
/// void Finalize(JSObjectRef object);
///
/// The finalize callback is called on the most derived class first, and the least
/// derived class (the parent class) last.
///
/// You must not call any function that may cause a garbage collection or an allocation
/// of a garbage collected object from within a JSObjectFinalizeCallback. This includes
/// all functions that have a JSContextRef parameter.
/// typedef void (*JSObjectFinalizeCallback) (JSObjectRef object);
typedef JSObjectFinalizeCallbackDart = void Function(Pointer object);
/// typedef JSObjectHasPropertyCallback
/// The callback invoked when determining whether an object has a property.
/// [ctx] The execution context to use.
/// [object] The JSObject to search for the property.
/// [propertyName] A JSString containing the name of the property to look up.
/// [@result] true if object has the property, otherwise false.
/// If you named your function HasProperty, you would declare it like this:
///
/// bool HasProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName);
///
/// If this function returns false, the hasProperty request forwards to object's statically declared properties, then its parent class chain (which includes the default object class), then its prototype chain.
///
/// This callback enables optimization in cases where only a property's existence needs to be known, not its value, and computing its value would be expensive.
///
/// If this callback is NULL, the getProperty callback will be used to service hasProperty requests.
/// typedef bool (*JSObjectHasPropertyCallback) (JSContextRef ctx, JSObjectRef object, JSStringRef propertyName);
typedef JSObjectHasPropertyCallbackDart = int Function(
Pointer ctx, Pointer object, Pointer propertyName);
/// typedef JSObjectGetPropertyCallback
/// The callback invoked when getting a property's value.
/// [ctx] The execution context to use.
/// [object] The JSObject to search for the property.
/// [propertyName] A JSString containing the name of the property to get.
/// [exception] A pointer to a JSValueRef in which to return an exception, if any.
/// [@result] The property's value if object has the property, otherwise NULL.
/// If you named your function GetProperty, you would declare it like this:
///
/// JSValueRef GetProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName, JSValueRef* exception);
///
/// If this function returns NULL, the get request forwards to object's statically declared properties, then its parent class chain (which includes the default object class), then its prototype chain.
/// typedef JSValueRef (*JSObjectGetPropertyCallback) (JSContextRef ctx, JSObjectRef object, JSStringRef propertyName, JSValueRef* exception);
typedef JSObjectGetPropertyCallbackDart = Pointer Function(Pointer ctx,
Pointer object, Pointer propertyName, Pointer<Pointer> exception);
/// typedef JSObjectSetPropertyCallback
/// The callback invoked when setting a property's value.
/// [ctx] The execution context to use.
/// [object] The JSObject on which to set the property's value.
/// [propertyName] A JSString containing the name of the property to set.
/// [value] A JSValue to use as the property's value.
/// [exception] A pointer to a JSValueRef in which to return an exception, if any.
/// [@result] true if the property was set, otherwise false.
/// If you named your function SetProperty, you would declare it like this:
///
/// bool SetProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName, JSValueRef value, JSValueRef* exception);
///
/// If this function returns false, the set request forwards to object's statically declared properties, then its parent class chain (which includes the default object class).
/// typedef bool (*JSObjectSetPropertyCallback) (JSContextRef ctx, JSObjectRef object, JSStringRef propertyName, JSValueRef value, JSValueRef* exception);
typedef JSObjectSetPropertyCallbackDart = int Function(
Pointer ctx,
Pointer object,
Pointer propertyName,
Pointer value,
Pointer<Pointer> exception);
/// typedef JSObjectDeletePropertyCallback
/// The callback invoked when deleting a property.
/// [ctx] The execution context to use.
/// [object] The JSObject in which to delete the property.
/// [propertyName] A JSString containing the name of the property to delete.
/// [exception] A pointer to a JSValueRef in which to return an exception, if any.
/// [@result] true if propertyName was successfully deleted, otherwise false.
/// If you named your function DeleteProperty, you would declare it like this:
///
/// bool DeleteProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName, JSValueRef* exception);
///
/// If this function returns false, the delete request forwards to object's statically declared properties, then its parent class chain (which includes the default object class).
/// typedef bool (*JSObjectDeletePropertyCallback) (JSContextRef ctx, JSObjectRef object, JSStringRef propertyName, JSValueRef* exception);
typedef JSObjectDeletePropertyCallbackDart = int Function(Pointer ctx,
Pointer object, Pointer propertyName, Pointer<Pointer> exception);
/// typedef JSObjectGetPropertyNamesCallback
/// The callback invoked when collecting the names of an object's properties.
/// [ctx] The execution context to use.
/// [object] The JSObject whose property names are being collected.
/// [propertyNames] A JavaScript property name accumulator in which to accumulate the names of object's properties.
/// If you named your function GetPropertyNames, you would declare it like this:
///
/// void GetPropertyNames(JSContextRef ctx, JSObjectRef object, JSPropertyNameAccumulatorRef propertyNames);
///
/// Property name accumulators are used by JSObjectCopyPropertyNames and JavaScript for...in loops.
///
/// Use JSPropertyNameAccumulatorAddName to add property names to accumulator. A class's getPropertyNames callback only needs to provide the names of properties that the class vends through a custom getProperty or setProperty callback. Other properties, including statically declared properties, properties vended by other classes, and properties belonging to object's prototype, are added independently.
/// typedef void (*JSObjectGetPropertyNamesCallback) (JSContextRef ctx, JSObjectRef object, JSPropertyNameAccumulatorRef propertyNames);
typedef JSObjectGetPropertyNamesCallbackDart = void Function(
Pointer ctx, Pointer object, Pointer propertyNames);
/// typedef JSObjectCallAsFunctionCallback
/// The callback invoked when an object is called as a function.
/// [ctx] The execution context to use.
/// [function] A JSObject that is the function being called.
/// [thisObject] A JSObject that is the 'this' variable in the function's scope.
/// [argumentCount] An integer count of the number of arguments in arguments.
/// [arguments] A JSValue array of the arguments passed to the function.
/// [exception] A pointer to a JSValueRef in which to return an exception, if any.
/// [@result] A JSValue that is the function's return value.
/// If you named your function CallAsFunction, you would declare it like this:
///
/// JSValueRef CallAsFunction(JSContextRef ctx, JSObjectRef function, JSObjectRef thisObject, size_t argumentCount, const JSValueRef arguments[], JSValueRef* exception);
///
/// If your callback were invoked by the JavaScript expression 'myObject.myFunction()', function would be set to myFunction, and thisObject would be set to myObject.
///
/// If this callback is NULL, calling your object as a function will throw an exception.
/// typedef JSValueRef (*JSObjectCallAsFunctionCallback) (JSContextRef ctx, JSObjectRef function, JSObjectRef thisObject, size_t argumentCount, const JSValueRef arguments[], JSValueRef* exception);
typedef JSObjectCallAsFunctionCallbackDart = Pointer Function(
Pointer ctx,
Pointer function,
Pointer thisObject,
int argumentCount,
Pointer<Pointer> arguments,
Pointer<Pointer> exception);
/// typedef JSObjectCallAsConstructorCallback
/// The callback invoked when an object is used as a constructor in a 'new' expression.
/// [ctx] The execution context to use.
/// [constructor] A JSObject that is the constructor being called.
/// [argumentCount] An integer count of the number of arguments in arguments.
/// [arguments] A JSValue array of the arguments passed to the function.
/// [exception] A pointer to a JSValueRef in which to return an exception, if any.
/// [@result] A JSObject that is the constructor's return value.
/// If you named your function CallAsConstructor, you would declare it like this:
///
/// JSObjectRef CallAsConstructor(JSContextRef ctx, JSObjectRef constructor, size_t argumentCount, const JSValueRef arguments[], JSValueRef* exception);
///
/// If your callback were invoked by the JavaScript expression 'new myConstructor()', constructor would be set to myConstructor.
///
/// If this callback is NULL, using your object as a constructor in a 'new' expression will throw an exception.
/// typedef JSObjectRef (*JSObjectCallAsConstructorCallback) (JSContextRef ctx, JSObjectRef constructor, size_t argumentCount, const JSValueRef arguments[], JSValueRef* exception);
typedef JSObjectCallAsConstructorCallbackDart = Pointer Function(
Pointer ctx,
Pointer constructor,
int argumentCount,
Pointer<Pointer> arguments,
Pointer<Pointer> exception);
/// typedef JSObjectHasInstanceCallback
/// The callback invoked when an object is used as the target of an 'instanceof' expression.
/// [ctx] The execution context to use.
/// [constructor] The JSObject that is the target of the 'instanceof' expression.
/// [possibleInstance] The JSValue being tested to determine if it is an instance of constructor.
/// [exception] A pointer to a JSValueRef in which to return an exception, if any.
/// [@result] true if possibleInstance is an instance of constructor, otherwise false.
/// If you named your function HasInstance, you would declare it like this:
///
/// bool HasInstance(JSContextRef ctx, JSObjectRef constructor, JSValueRef possibleInstance, JSValueRef* exception);
///
/// If your callback were invoked by the JavaScript expression 'someValue instanceof myObject', constructor would be set to myObject and possibleInstance would be set to someValue.
///
/// If this callback is NULL, 'instanceof' expressions that target your object will return false.
///
/// Standard JavaScript practice calls for objects that implement the callAsConstructor callback to implement the hasInstance callback as well.
/// typedef bool (*JSObjectHasInstanceCallback) (JSContextRef ctx, JSObjectRef constructor, JSValueRef possibleInstance, JSValueRef* exception);
typedef JSObjectHasInstanceCallbackDart = int Function(Pointer ctx,
Pointer constructor, Pointer possibleInstance, Pointer<Pointer> exception);
/// enum JSPropertyAttributes
/// A set of JSPropertyAttributes. Combine multiple attributes by logically ORing them together.
enum JSPropertyAttributes {
/// Specifies that a property has no special attributes.
kJSPropertyAttributeNone,
/// Specifies that a property is read-only.
kJSPropertyAttributeReadOnly,
/// Specifies that a property should not be enumerated by JSPropertyEnumerators and JavaScript for...in loops.
kJSPropertyAttributeDontEnum,
/// Specifies that the delete operation should fail on a property.
kJSPropertyAttributeDontDelete
}
/// enum JSClassAttributes
/// A set of JSClassAttributes. Combine multiple attributes by logically ORing them together.
enum JSClassAttributes {
/// kJSClassAttributeNone Specifies that a class has no special attributes.
kJSClassAttributeNone,
/// kJSClassAttributeNoAutomaticPrototype Specifies that a class should not automatically generate a shared prototype for its instance objects. Use kJSClassAttributeNoAutomaticPrototype in combination with JSObjectSetPrototype to manage prototypes manually.
kJSClassAttributeNoAutomaticPrototype,
}
/// enum JSPropertyAttributes to C enum
int jSPropertyAttributesToCEnum(JSPropertyAttributes type) {
switch (type) {
case JSPropertyAttributes.kJSPropertyAttributeReadOnly:
return JSObjectRef.JSPropertyAttributes.kJSPropertyAttributeReadOnly;
case JSPropertyAttributes.kJSPropertyAttributeDontEnum:
return JSObjectRef.JSPropertyAttributes.kJSPropertyAttributeDontEnum;
case JSPropertyAttributes.kJSPropertyAttributeDontDelete:
return JSObjectRef.JSPropertyAttributes.kJSPropertyAttributeDontDelete;
default:
return JSObjectRef.JSPropertyAttributes.kJSPropertyAttributeNone;
}
}
/// C enum to enum JSPropertyAttributes
JSPropertyAttributes cEnumToJSPropertyAttributes(int typeCode) {
switch (typeCode) {
case JSObjectRef.JSPropertyAttributes.kJSPropertyAttributeReadOnly:
return JSPropertyAttributes.kJSPropertyAttributeReadOnly;
case JSObjectRef.JSPropertyAttributes.kJSPropertyAttributeDontEnum:
return JSPropertyAttributes.kJSPropertyAttributeDontEnum;
case JSObjectRef.JSPropertyAttributes.kJSPropertyAttributeDontDelete:
return JSPropertyAttributes.kJSPropertyAttributeDontDelete;
default:
return JSPropertyAttributes.kJSPropertyAttributeNone;
}
}
/// enum JSClassAttributes to C enum
int jSClassAttributesToCEnum(JSClassAttributes type) {
switch (type) {
case JSClassAttributes.kJSClassAttributeNoAutomaticPrototype:
return JSObjectRef
.JSClassAttributes.kJSClassAttributeNoAutomaticPrototype;
default:
return JSObjectRef.JSClassAttributes.kJSClassAttributeNone;
}
}
/// C enum to enum JSClassAttributes
JSClassAttributes cEnumToJSClassAttributes(int typeCode) {
switch (typeCode) {
case JSObjectRef.JSClassAttributes.kJSClassAttributeNoAutomaticPrototype:
return JSClassAttributes.kJSClassAttributeNoAutomaticPrototype;
default:
return JSClassAttributes.kJSClassAttributeNone;
}
}
/// struct JSStaticValue
/// This structure describes a statically declared value property.
class JSStaticValue {
/// Property's name.
String name;
/// A JSObjectGetPropertyCallback to invoke when getting the property's value.
Pointer<NativeFunction<JSObjectRef.JSObjectGetPropertyCallback>>? getProperty;
/// A JSObjectSetPropertyCallback to invoke when setting the property's value. May be NULL if the ReadOnly attribute is set.
Pointer<NativeFunction<JSObjectRef.JSObjectSetPropertyCallback>>? setProperty;
/// (unsigned) A logically ORed set of [JSPropertyAttributes] to give to the property.
JSPropertyAttributes attributes;
JSStaticValue({
required this.name,
this.getProperty,
this.setProperty,
this.attributes = JSPropertyAttributes.kJSPropertyAttributeNone,
});
Pointer<JSObjectRef.JSStaticValue> create() {
return JSObjectRef.JSStaticValuePointer.allocate(
JSObjectRef.JSStaticValueStruct(
name: name.toNativeUtf8(),
getProperty: getProperty ?? nullptr,
setProperty: setProperty ?? nullptr,
attributes: jSPropertyAttributesToCEnum(attributes),
));
}
JSObjectRef.JSStaticValueStruct toStruct() {
return JSObjectRef.JSStaticValueStruct(
name: name.toNativeUtf8(),
getProperty: getProperty ?? nullptr,
setProperty: setProperty ?? nullptr,
attributes: jSPropertyAttributesToCEnum(attributes),
);
}
}
extension JSStaticValueArray on List<JSStaticValue> {
Pointer<JSObjectRef.JSStaticValue> createArray() {
return JSObjectRef.JSStaticValuePointer.allocateArray(
this.map((e) => e.toStruct()).toList());
}
}
/// struct JSStaticFunction
/// This structure describes a statically declared function property.
class JSStaticFunction {
/// Property's name.
String name;
/// A JSObjectCallAsFunctionCallback to invoke when the property is called as a function.
Pointer<NativeFunction<JSObjectRef.JSObjectCallAsFunctionCallback>>?
callAsFunction;
/// A logically ORed set of [JSPropertyAttributes] to give to the property.
JSPropertyAttributes attributes;
JSStaticFunction({
required this.name,
this.callAsFunction,
this.attributes = JSPropertyAttributes.kJSPropertyAttributeNone,
});
Pointer<JSObjectRef.JSStaticFunction> create() {
return JSObjectRef.JSStaticFunctionPointer.allocate(
JSObjectRef.JSStaticFunctionStruct(
name: name.toNativeUtf8(),
callAsFunction: callAsFunction ?? nullptr,
attributes: jSPropertyAttributesToCEnum(attributes),
));
}
JSObjectRef.JSStaticFunctionStruct toStruct() {
return JSObjectRef.JSStaticFunctionStruct(
name: name.toNativeUtf8(),
callAsFunction: callAsFunction ?? nullptr,
attributes: jSPropertyAttributesToCEnum(attributes),
);
}
}
extension JSStaticFunctionArray on List<JSStaticFunction> {
Pointer<JSObjectRef.JSStaticFunction> createArray() {
return JSObjectRef.JSStaticFunctionPointer.allocateArray(
this.map((e) => e.toStruct()).toList());
}
}
/// struct JSClassDefinition
/// This structure contains properties and callbacks that define a type of object. All fields other than the version field are optional. Any pointer may be NULL.
/// The staticValues and staticFunctions arrays are the simplest and most efficient means for vending custom properties. Statically declared properties automatically service requests like getProperty, setProperty, and getPropertyNames. Property access callbacks are required only to implement unusual properties, like array indexes, whose names are not known at compile-time.
///
/// If you named your getter function "GetX" and your setter function "SetX", you would declare a JSStaticValue array containing "X" like this:
///
/// JSStaticValue StaticValueArray[] = {
///     { "X", GetX, SetX, kJSPropertyAttributeNone },
///     { 0, 0, 0, 0 }
/// };
///
/// Standard JavaScript practice calls for storing function objects in prototypes, so they can be shared. The default JSClass created by JSClassCreate follows this idiom, instantiating objects with a shared, automatically generated prototype containing the class's function objects. The kJSClassAttributeNoAutomaticPrototype attribute specifies that a JSClass should not automatically generate such a prototype. The resulting JSClass instantiates objects with the default object prototype, and gives each instance object its own copy of the class's function objects.
///
/// A NULL callback specifies that the default object callback should substitute, except in the case of hasProperty, where it specifies that getProperty should substitute.
class JSClassDefinition {
/// The version number of this structure. The current version is 0.
int version;
/// A logically ORed set of [JSClassAttributes] to give to the class.
JSClassAttributes attributes;
/// A null-terminated UTF8 string containing the class's name.
String className;
  /// A JSClass to set as the class's parent class. Pass NULL to use the default object class.
JSClass? parentClass;
/// A JSStaticValue array containing the class's statically declared value properties. Pass NULL to specify no statically declared value properties. The array must be terminated by a JSStaticValue whose name field is NULL.
List<JSStaticValue>? staticValues;
/// A JSStaticFunction array containing the class's statically declared function properties. Pass NULL to specify no statically declared function properties. The array must be terminated by a JSStaticFunction whose name field is NULL.
List<JSStaticFunction>? staticFunctions;
/// The callback invoked when an object is first created. Use this callback to initialize the object.
Pointer<NativeFunction<JSObjectRef.JSObjectInitializeCallback>>? initialize;
/// The callback invoked when an object is finalized (prepared for garbage collection). Use this callback to release resources allocated for the object, and perform other cleanup.
Pointer<NativeFunction<JSObjectRef.JSObjectFinalizeCallback>>? finalize;
/// The callback invoked when determining whether an object has a property. If this field is NULL, getProperty is called instead. The hasProperty callback enables optimization in cases where only a property's existence needs to be known, not its value, and computing its value is expensive.
Pointer<NativeFunction<JSObjectRef.JSObjectHasPropertyCallback>>? hasProperty;
/// The callback invoked when getting a property's value.
Pointer<NativeFunction<JSObjectRef.JSObjectGetPropertyCallback>>? getProperty;
/// The callback invoked when setting a property's value.
Pointer<NativeFunction<JSObjectRef.JSObjectSetPropertyCallback>>? setProperty;
/// The callback invoked when deleting a property.
Pointer<NativeFunction<JSObjectRef.JSObjectDeletePropertyCallback>>?
deleteProperty;
/// The callback invoked when collecting the names of an object's properties.
Pointer<NativeFunction<JSObjectRef.JSObjectGetPropertyNamesCallback>>?
getPropertyNames;
/// The callback invoked when an object is called as a function.
Pointer<NativeFunction<JSObjectRef.JSObjectCallAsFunctionCallback>>?
callAsFunction;
/// The callback invoked when an object is used as the target of an 'instanceof' expression.
Pointer<NativeFunction<JSObjectRef.JSObjectCallAsConstructorCallback>>?
callAsConstructor;
/// The callback invoked when an object is used as a constructor in a 'new' expression.
Pointer<NativeFunction<JSObjectRef.JSObjectHasInstanceCallback>>? hasInstance;
/// The callback invoked when converting an object to a particular JavaScript type.
Pointer<NativeFunction<JSObjectRef.JSObjectConvertToTypeCallback>>?
convertToType;
JSClassDefinition({
this.version = 0,
this.attributes = JSClassAttributes.kJSClassAttributeNone,
required this.className,
this.parentClass,
this.staticValues,
this.staticFunctions,
this.initialize,
this.finalize,
this.hasProperty,
this.getProperty,
this.setProperty,
this.deleteProperty,
this.getPropertyNames,
this.callAsFunction,
this.callAsConstructor,
this.hasInstance,
this.convertToType,
});
Pointer<JSObjectRef.JSClassDefinition> create() {
Pointer<JSObjectRef.JSStaticValue> staticValues =
this.staticValues == null || this.staticValues!.isEmpty
? nullptr
: this.staticValues!.createArray();
Pointer<JSObjectRef.JSStaticFunction> staticFunctions =
this.staticFunctions == null || this.staticFunctions!.isEmpty
? nullptr
: this.staticFunctions!.createArray();
return JSObjectRef.JSClassDefinitionPointer.allocate(
version: version,
attributes: jSClassAttributesToCEnum(attributes),
className: className.toNativeUtf8(),
parentClass: parentClass == null ? nullptr : parentClass!.pointer,
staticValues: staticValues,
staticFunctions: staticFunctions,
initialize: initialize ?? nullptr,
finalize: finalize ?? nullptr,
hasProperty: hasProperty ?? nullptr,
getProperty: getProperty ?? nullptr,
setProperty: setProperty ?? nullptr,
deleteProperty: deleteProperty ?? nullptr,
getPropertyNames: getPropertyNames ?? nullptr,
callAsFunction: callAsFunction ?? nullptr,
callAsConstructor: callAsConstructor ?? nullptr,
hasInstance: hasInstance ?? nullptr,
convertToType: convertToType ?? nullptr,
);
}
}
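// Usage sketch (illustrative; `logCallback` is a hypothetical
// Pointer.fromFunction trampoline and `globalContext` an existing context):
// define a class with one static function and instantiate it, mirroring the
// raw-binding pattern used in bindings_page.dart.
//
//   final definition = JSClassDefinition(
//     className: 'console',
//     staticFunctions: [
//       JSStaticFunction(name: 'log', callAsFunction: logCallback),
//     ],
//   ).create();
//   final classRef = jSClassCreate(definition);
//   final consoleRef = jSObjectMake(globalContext, classRef, nullptr);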
/// A JavaScript object. A JSObject is a JSValue.
class JSObject {
/// JavaScript context
final JSContext context;
/// C pointer
final Pointer pointer;
JSObject(this.context, this.pointer);
/// Creates a JavaScript object.
/// The default object class does not allocate storage for private data, so you must provide a non-NULL jsClass to JSObjectMake if you want your object to be able to store private data.
///
  /// data is set on the created object before the initialize methods in its class chain are called. This enables the initialize methods to retrieve and manipulate data through JSObjectGetPrivate.
/// [jsClass] (JSClassRef) The JSClass to assign to the object. Pass NULL to use the default object class.
/// [data] (void*) A void* to set as the object's private data. Pass NULL to specify no private data.
JSObject.make(
this.context,
JSClass jsClass, {
Pointer? data,
}) : this.pointer = JSObjectRef.jSObjectMake(
context.pointer, jsClass.pointer, data ?? nullptr);
/// Convenience method for creating a JavaScript function with a given callback as its implementation.
/// [name] A JSString containing the function's name. This will be used when converting the function to string. Pass NULL to create an anonymous function.
/// [callAsFunction] The JSObjectCallAsFunctionCallback to invoke when the function is called.
JSObject.makeFunctionWithCallback(
this.context,
String name,
Pointer<NativeFunction<JSObjectRef.JSObjectCallAsFunctionCallback>>?
callAsFunction)
: this.pointer = JSObjectRef.jSObjectMakeFunctionWithCallback(
context.pointer,
JSString.fromString(name).pointer,
callAsFunction ?? nullptr);
/// Convenience method for creating a JavaScript constructor.
/// The default object constructor takes no arguments and constructs an object of class jsClass with no private data.
  /// [jsClass] A JSClass that is the class your constructor will assign to the objects it constructs. jsClass will be used to set the constructor's .prototype property, and to evaluate 'instanceof' expressions. Pass NULL to use the default object class.
/// [callAsConstructor] A JSObjectCallAsConstructorCallback to invoke when your constructor is used in a 'new' expression. Pass NULL to use the default object constructor.
JSObject.makeConstructor(
this.context,
JSClass jsClass,
Pointer<NativeFunction<JSObjectRef.JSObjectCallAsConstructorCallback>>?
callAsConstructor)
: this.pointer = JSObjectRef.jSObjectMakeConstructor(
context.pointer, jsClass.pointer, callAsConstructor ?? nullptr);
/// Creates a JavaScript Array object.
/// The behavior of this function does not exactly match the behavior of the built-in Array constructor. Specifically, if one argument
/// is supplied, this function returns an array with one element.
/// [arguments] A JSValue array of data to populate the Array with. Pass NULL if argumentCount is 0.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeArray(
this.context,
JSValuePointer arguments, {
JSValuePointer? exception,
}) : this.pointer = JSObjectRef.jSObjectMakeArray(
context.pointer,
arguments.count,
arguments.pointer,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript Date object, as if by invoking the built-in Date constructor.
/// [arguments] A JSValue array of arguments to pass to the Date Constructor. Pass NULL if argumentCount is 0.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeDate(
this.context,
JSValuePointer arguments, {
JSValuePointer? exception,
}) : this.pointer = JSObjectRef.jSObjectMakeDate(
context.pointer,
arguments.count,
arguments.pointer,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript Error object, as if by invoking the built-in Error constructor.
/// [arguments] (JSValueRef[]) A JSValue array of arguments to pass to the Error Constructor. Pass NULL if argumentCount is 0.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeError(
this.context,
JSValuePointer arguments, {
JSValuePointer? exception,
}) : this.pointer = JSObjectRef.jSObjectMakeError(
context.pointer,
arguments.count,
arguments.pointer,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript RegExp object, as if by invoking the built-in RegExp constructor.
/// [arguments] (JSValueRef[]) A JSValue array of arguments to pass to the RegExp Constructor. Pass NULL if argumentCount is 0.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeRegExp(
this.context,
JSValuePointer arguments, {
JSValuePointer? exception,
}) : this.pointer = JSObjectRef.jSObjectMakeRegExp(
context.pointer,
arguments.count,
arguments.pointer,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript promise object by invoking the provided executor.
/// [resolve] (JSObjectRef*) A pointer to a JSObjectRef in which to store the resolve function for the new promise. Pass NULL if you do not care to store the resolve callback.
/// [reject] (JSObjectRef*) A pointer to a JSObjectRef in which to store the reject function for the new promise. Pass NULL if you do not care to store the reject callback.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeDeferredPromise(
this.context,
JSObjectPointer resolve,
JSObjectPointer reject, {
JSValuePointer? exception,
}) : this.pointer = JSObjectRef.jSObjectMakeDeferredPromise(
context.pointer,
resolve.pointer,
reject.pointer,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a function with a given script as its body.
/// Use this method when you want to execute a script repeatedly, to avoid the cost of re-parsing the script before each execution.
/// [name] A JSString containing the function's name. This will be used when converting the function to string. Pass NULL to create an anonymous function.
/// [parameterNames] (JSStringRef[]) A JSString array containing the names of the function's parameters. Pass NULL if parameterCount is 0.
/// [body] A JSString containing the script to use as the function's body.
/// [sourceURL] A JSString containing a URL for the script's source file. This is only used when reporting exceptions. Pass NULL if you do not care to include source file information in exceptions.
/// [startingLineNumber] (int) An integer value specifying the script's starting line number in the file located at sourceURL. This is only used when reporting exceptions. The value is one-based, so the first line is line 1 and invalid values are clamped to 1.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store a syntax error exception, if any. Pass NULL if you do not care to store a syntax error exception.
JSObject.makeFunction(
this.context,
String name,
JSStringPointer parameterNames,
String body,
String sourceURL, {
JSValuePointer? exception,
int startingLineNumber = 0,
}) : this.pointer = JSObjectRef.jSObjectMakeFunction(
context.pointer,
JSString.fromString(name).pointer,
parameterNames.count,
parameterNames.pointer,
JSString.fromString(body).pointer,
JSString.fromString(sourceURL).pointer,
startingLineNumber,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript Typed Array object with the given number of elements.
/// [arrayType] A value [JSTypedArrayType] identifying the type of array to create. If arrayType is kJSTypedArrayTypeNone or kJSTypedArrayTypeArrayBuffer then NULL will be returned.
/// [length] (size_t) The number of elements to be in the new Typed Array.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeTypedArray(
this.context,
JSTypedArrayType arrayType,
int length, {
JSValuePointer? exception,
}) : this.pointer = JSTypedArray.jSObjectMakeTypedArray(
context.pointer,
JSValue.jSTypedArrayTypeToCEnum(arrayType),
length,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript Typed Array object from an existing pointer.
/// If an exception is thrown during this function the bytesDeallocator will always be called.
/// [arrayType] A value [JSTypedArrayType] identifying the type of array to create. If arrayType is kJSTypedArrayTypeNone or kJSTypedArrayTypeArrayBuffer then NULL will be returned.
/// [bytes] (void*) A pointer to the byte buffer to be used as the backing store of the Typed Array object.
/// [byteLength] The number of bytes pointed to by the parameter bytes.
/// [bytesDeallocator] (JSTypedArrayBytesDeallocator) The allocator to use to deallocate the external buffer when the JSTypedArrayData object is deallocated.
/// [deallocatorContext] (void*) A pointer to pass back to the deallocator.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeTypedArrayWithBytesNoCopy(
this.context,
JSTypedArrayType arrayType,
Bytes bytes,
Pointer<NativeFunction<JSBase.JSTypedArrayBytesDeallocator>>?
bytesDeallocator,
Pointer deallocatorContext, {
JSValuePointer? exception,
}) : this.pointer = JSTypedArray.jSObjectMakeTypedArrayWithBytesNoCopy(
context.pointer,
JSValue.jSTypedArrayTypeToCEnum(arrayType),
bytes.pointer,
bytes.length,
bytesDeallocator ?? nullptr,
deallocatorContext,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript Typed Array object from an existing JavaScript Array Buffer object.
/// [arrayType] A value [JSTypedArrayType] identifying the type of array to create. If arrayType is kJSTypedArrayTypeNone or kJSTypedArrayTypeArrayBuffer then NULL will be returned.
/// [buffer] An Array Buffer object that should be used as the backing store for the created JavaScript Typed Array object.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeTypedArrayWithArrayBuffer(
this.context,
JSTypedArrayType arrayType,
JSObject buffer, {
JSValuePointer? exception,
}) : this.pointer = JSTypedArray.jSObjectMakeTypedArrayWithArrayBuffer(
context.pointer,
JSValue.jSTypedArrayTypeToCEnum(arrayType),
buffer.pointer,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript Typed Array object from an existing JavaScript Array Buffer object with the given offset and length.
/// [arrayType] A value [JSTypedArrayType] identifying the type of array to create. If arrayType is kJSTypedArrayTypeNone or kJSTypedArrayTypeArrayBuffer then NULL will be returned.
/// [buffer] (JSObjectRef) An Array Buffer object that should be used as the backing store for the created JavaScript Typed Array object.
  /// [byteOffset] (size_t) The byte offset for the created Typed Array. byteOffset should be aligned with the element size of arrayType.
/// [length] (size_t) The number of elements to include in the Typed Array.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeTypedArrayWithArrayBufferAndOffset(
this.context,
JSTypedArrayType arrayType,
JSObject buffer,
int byteOffset,
int length, {
JSValuePointer? exception,
}) : this.pointer =
JSTypedArray.jSObjectMakeTypedArrayWithArrayBufferAndOffset(
context.pointer,
JSValue.jSTypedArrayTypeToCEnum(arrayType),
buffer.pointer,
byteOffset,
length,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Creates a JavaScript Array Buffer object from an existing pointer.
/// If an exception is thrown during this function the bytesDeallocator will always be called.
/// [bytes] (void*) A pointer to the byte buffer to be used as the backing store of the Typed Array object.
/// [bytesDeallocator] (JSTypedArrayBytesDeallocator) The allocator to use to deallocate the external buffer when the Typed Array data object is deallocated.
/// [deallocatorContext] (void*) A pointer to pass back to the deallocator.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject.makeArrayBufferWithBytesNoCopy(
this.context,
Bytes bytes,
Pointer<NativeFunction<JSBase.JSTypedArrayBytesDeallocator>>?
bytesDeallocator,
Pointer deallocatorContext, {
JSValuePointer? exception,
}) : this.pointer = JSTypedArray.jSObjectMakeArrayBufferWithBytesNoCopy(
context.pointer,
bytes.pointer,
bytes.length,
bytesDeallocator ?? nullptr,
deallocatorContext,
(exception ?? JSValuePointer(nullptr)).pointer);
/// Gets an object's prototype.
JSValue get prototype {
return JSValue(
context, JSObjectRef.jSObjectGetPrototype(context.pointer, pointer));
}
/// Sets an object's prototype.
/// [value] (JSValueRef) A JSValue to set as the object's prototype.
set prototype(JSValue value) {
JSObjectRef.jSObjectSetPrototype(context.pointer, pointer, value.pointer);
}
/// Tests whether an object has a given property.
/// [propertyName] (JSStringRef) A JSString containing the property's name.
bool hasProperty(String propertyName) {
return JSObjectRef.jSObjectHasProperty(context.pointer, pointer,
JSString.fromString(propertyName).pointer) ==
1;
}
  /// Gets a property from an object.
/// [propertyName] (JSStringRef) A JSString containing the property's name.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSValue getProperty(
String propertyName, {
JSValuePointer? exception,
}) {
return JSValue(
context,
JSObjectRef.jSObjectGetProperty(
context.pointer,
pointer,
JSString.fromString(propertyName).pointer,
(exception ?? JSValuePointer(nullptr)).pointer));
}
/// Sets a property on an object.
/// [propertyName] A JSString containing the property's name.
/// [value] A JSValueRef to use as the property's value.
/// [attributes] (JSPropertyAttributes) A logically ORed set of JSPropertyAttributes to give to the property.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
void setProperty(
String propertyName,
JSValue value,
JSPropertyAttributes attributes, {
JSValuePointer? exception,
}) {
JSObjectRef.jSObjectSetProperty(
context.pointer,
pointer,
JSString.fromString(propertyName).pointer,
value.pointer,
jSPropertyAttributesToCEnum(attributes),
(exception ?? JSValuePointer(nullptr)).pointer);
}
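  // Usage sketch (illustrative; `obj` is an existing JSObject and `value`
  // an existing JSValue): write a property and read it back.
  //
  //   obj.setProperty('answer', value, JSPropertyAttributes.kJSPropertyAttributeNone);
  //   final answer = obj.getProperty('answer');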
/// Deletes a property from an object.
/// [propertyName] A JSString containing the property's name.
/// [exception] A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
/// [@result] (bool) true if the delete operation succeeds, otherwise false (for example, if the property has the kJSPropertyAttributeDontDelete attribute set).
bool deleteProperty(
String propertyName, {
JSValuePointer? exception,
}) {
return JSObjectRef.jSObjectDeleteProperty(
context.pointer,
pointer,
JSString.fromString(propertyName).pointer,
(exception ?? JSValuePointer(nullptr)).pointer) ==
1;
}
/// Tests whether an object has a given property using a JSValueRef as the property key.
/// This function is the same as performing "propertyKey in object" from JavaScript.
/// [propertyKey] A JSValueRef containing the property key to use when looking up the property.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
bool hasPropertyForKey(
String propertyKey, {
JSValuePointer? exception,
}) {
return JSObjectRef.jSObjectHasPropertyForKey(
context.pointer,
pointer,
JSString.fromString(propertyKey).pointer,
(exception ?? JSValuePointer(nullptr)).pointer) ==
1;
}
/// Gets a property from an object using a JSValueRef as the property key.
/// This function is the same as performing "object[propertyKey]" from JavaScript.
/// [propertyKey] A JSValueRef containing the property key to use when looking up the property.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSValue getPropertyForKey(
String propertyKey, {
JSValuePointer? exception,
}) {
return JSValue(
context,
JSObjectRef.jSObjectGetPropertyForKey(
context.pointer,
pointer,
JSString.fromString(propertyKey).pointer,
(exception ?? JSValuePointer(nullptr)).pointer));
}
/// Sets a property on an object using a JSValueRef as the property key.
/// This function is the same as performing "object[propertyKey] = value" from JavaScript.
/// [propertyKey] (JSValueRef) A JSValueRef containing the property key to use when looking up the property.
/// [value] (JSValueRef) A JSValueRef to use as the property's value.
/// [attributes] (JSPropertyAttributes) A logically ORed set of JSPropertyAttributes to give to the property.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
void setPropertyForKey(
String propertyKey,
JSValue value,
JSPropertyAttributes attributes, {
JSValuePointer? exception,
}) {
JSObjectRef.jSObjectSetPropertyForKey(
context.pointer,
pointer,
JSString.fromString(propertyKey).pointer,
value.pointer,
jSPropertyAttributesToCEnum(attributes),
(exception ?? JSValuePointer(nullptr)).pointer);
}
/// Gets a property from an object by numeric index.
/// Calling JSObjectGetPropertyAtIndex is equivalent to calling JSObjectGetProperty with a string containing propertyIndex, but JSObjectGetPropertyAtIndex provides optimized access to numeric properties.
/// [propertyIndex] (unsigned) An integer value that is the property's name.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSValue getPropertyAtIndex(
int propertyIndex, {
JSValuePointer? exception,
}) {
return JSValue(
context,
JSObjectRef.jSObjectGetPropertyAtIndex(context.pointer, pointer,
propertyIndex, (exception ?? JSValuePointer(nullptr)).pointer));
}
/// Sets a property on an object by numeric index.
/// Calling JSObjectSetPropertyAtIndex is equivalent to calling JSObjectSetProperty with a string containing propertyIndex, but JSObjectSetPropertyAtIndex provides optimized access to numeric properties.
/// [propertyIndex] (unsigned) The property's name as a number.
/// [value] (JSValueRef) A JSValue to use as the property's value.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
void setPropertyAtIndex(
int propertyIndex,
JSValue value, {
JSValuePointer? exception,
}) {
JSObjectRef.jSObjectSetPropertyAtIndex(
context.pointer,
pointer,
propertyIndex,
value.pointer,
(exception ?? JSValuePointer(nullptr)).pointer);
}
/// Gets an object's private data.
Pointer get private {
return JSObjectRef.jSObjectGetPrivate(pointer);
}
/// Sets a pointer to private data on an object.
/// The default object class does not allocate storage for private data. Only objects created with a non-NULL JSClass can store private data.
/// [data] (void*) A void* to set as the object's private data.
bool setPrivate(Pointer data) {
return JSObjectRef.jSObjectSetPrivate(pointer, data) == 1;
}
/// Tests whether an object can be called as a function.
bool get isFunction {
return JSObjectRef.jSObjectIsFunction(context.pointer, pointer) == 1;
}
/// Calls an object as a function.
/// [thisObject] (JSObjectRef) The object to use as "this," or NULL to use the global object as "this."
/// [arguments] (JSValueRef[]) A JSValue array of arguments to pass to the function. Pass NULL if argumentCount is 0.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
/// [@result] (JSValue) The JSValue that results from calling object as a function, or NULL if an exception is thrown or object is not a function.
JSValue callAsFunction(
JSObject thisObject,
JSValuePointer arguments, {
JSValuePointer? exception,
}) {
return JSValue(
context,
JSObjectRef.jSObjectCallAsFunction(
context.pointer,
pointer,
thisObject.pointer,
arguments.count,
arguments.pointer,
(exception ?? JSValuePointer(nullptr)).pointer));
}
/// Tests whether an object can be called as a constructor.
bool get isConstructor {
return JSObjectRef.jSObjectIsConstructor(context.pointer, pointer) == 1;
}
/// Calls an object as a constructor.
/// [arguments] (JSValueRef[]) A JSValue array of arguments to pass to the constructor. Pass NULL if argumentCount is 0.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
/// [@result] (JSObjectRef) The JSObject that results from calling object as a constructor, or NULL if an exception is thrown or object is not a constructor.
JSObject callAsConstructor(
JSValuePointer arguments, {
JSValuePointer? exception,
}) {
return JSObject(
context,
JSObjectRef.jSObjectCallAsConstructor(
context.pointer,
pointer,
arguments.count,
arguments.pointer,
(exception ?? JSValuePointer(nullptr)).pointer));
}
/// Gets the names of an object's enumerable properties.
JSPropertyNameArray copyPropertyNames() {
return JSPropertyNameArray(
JSObjectRef.jSObjectCopyPropertyNames(context.pointer, pointer));
}
/// Adds a property name to a JavaScript property name accumulator.
/// [accumulator] (JSPropertyNameAccumulatorRef) The accumulator object to which to add the property name.
/// [propertyName] (JSStringRef) The property name to add.
void propertyNameAccumulatorAddName(
JSPropertyNameAccumulator accumulator, String propertyName) {
JSObjectRef.jSPropertyNameAccumulatorAddName(
accumulator.pointer, JSString.fromString(propertyName).pointer);
}
/// Returns a temporary pointer to the backing store of a JavaScript Typed Array object.
/// The pointer returned by this function is temporary and is not guaranteed to remain valid across JavaScriptCore API calls.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
Bytes typedArrayBytes({
JSValuePointer? exception,
}) {
return Bytes(
JSTypedArray.jSObjectGetTypedArrayBytesPtr(context.pointer, pointer,
(exception ?? JSValuePointer(nullptr)).pointer),
JSTypedArray.jSObjectGetTypedArrayLength(context.pointer, pointer,
(exception ?? JSValuePointer(nullptr)).pointer));
}
/// Returns the byte length of a JavaScript Typed Array object.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
int typedArrayByteLength({
JSValuePointer? exception,
}) {
return JSTypedArray.jSObjectGetTypedArrayByteLength(context.pointer,
pointer, (exception ?? JSValuePointer(nullptr)).pointer);
}
/// Returns the byte offset of a JavaScript Typed Array object.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
int typedArrayByteOffset({
JSValuePointer? exception,
}) {
return JSTypedArray.jSObjectGetTypedArrayByteOffset(context.pointer,
pointer, (exception ?? JSValuePointer(nullptr)).pointer);
}
/// Returns the JavaScript Array Buffer object that is used as the backing of a JavaScript Typed Array object.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
JSObject typedArrayBuffer({
JSValuePointer? exception,
}) {
return JSObject(
context,
JSTypedArray.jSObjectGetTypedArrayBuffer(context.pointer, pointer,
(exception ?? JSValuePointer(nullptr)).pointer));
}
/// Returns a pointer to the data buffer that serves as the backing store for a JavaScript Typed Array object.
/// The pointer returned by this function is temporary and is not guaranteed to remain valid across JavaScriptCore API calls.
/// [exception] (JSValueRef*) A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
Bytes arrayBufferBytes({
JSValuePointer? exception,
}) {
return Bytes(
JSTypedArray.jSObjectGetArrayBufferBytesPtr(context.pointer, pointer,
(exception ?? JSValuePointer(nullptr)).pointer),
JSTypedArray.jSObjectGetArrayBufferByteLength(context.pointer, pointer,
(exception ?? JSValuePointer(nullptr)).pointer));
}
/// Converts this JSObject to a JSValue that shares the same underlying pointer.
JSValue toValue() {
return JSValue(context, pointer);
}
}
/// JSObjectRef pointer
class JSObjectPointer {
/// C pointer
final Pointer<Pointer> pointer;
/// Pointer array count
final int count;
JSObjectPointer([Pointer? value])
: this.count = 1,
this.pointer = malloc.call<Pointer>(1) {
pointer.value = value ?? nullptr;
}
/// JSObjectRef array
JSObjectPointer.array(List<JSObject> array)
: this.count = array.length,
this.pointer = malloc.call<Pointer>(array.length) {
for (int i = 0; i < array.length; i++) {
this.pointer[i] = array[i].pointer;
}
}
/// Get JSValue
/// [index] Array index
JSObject getValue(JSContext context, [int index = 0]) {
return JSObject(context, pointer[index]);
}
}
/// A pointer to the byte buffer to be used as the backing store of the Typed Array object.
class Bytes {
/// C pointer
final Pointer pointer;
/// Bytes count
final int length;
Bytes(this.pointer, this.length);
}
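// Hedged usage sketch (illustration only, not part of the library): basic
// property access with the wrappers above. It assumes an existing JSContext
// `context` and a JSObject `obj` obtained elsewhere; the
// kJSPropertyAttributeNone constant name is an assumption.
//
// final value = obj.getPropertyForKey('answer');
// obj.setPropertyForKey(
//   'question', value, JSPropertyAttributes.kJSPropertyAttributeNone);
// if (obj.isFunction) {
//   // obj can be invoked via callAsFunction(...).
// }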
| xuelongqy/flutter_jscore |
<|start_filename|>LuoguPaintboardPro/LuoguAccount.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using System.Net.Http;
using System.Threading.Tasks;
namespace LuoguPaintboardPro
{
class LuoguAccount
{
public string Uid { get; private set; }
public string OriginalCookie { get; private set; }
public int FailureCount { get; set; } = 0;
public DateTime ReadyTime { get; set; }
HttpClient client;
public LuoguAccount(string cookieText)
{
client = new HttpClient();
client.DefaultRequestHeaders.Add("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36 Edg/79.0.309.56");
client.DefaultRequestHeaders.Add("cookie", cookieText);
OriginalCookie = cookieText;
Uid = Regex.Match(cookieText, @"_uid=(\d+)").Groups[1].Value;
ReadyTime = DateTime.Now;
}
public async Task<string[]> GetBoard()
{
var response = await client.GetAsync("https://www.luogu.com.cn/paintBoard/board");
if (!response.IsSuccessStatusCode) {
throw new Exception($"使用账号 {Uid} 时 Http 状态码异常! 返回状态码: {response.StatusCode}");
}
var result = (await response.Content.ReadAsStringAsync()).Split('\n');
if (result.Length < 2) throw new Exception($"Failed to fetch the board with account {Uid}! Status code: {response.StatusCode}");
return result;
}
public async Task<bool> Paint(int x, int y, int color)
{
try
{
Console.WriteLine($"正在使用账号 {Uid} 绘制 ({x}, {y}), 颜色 {color}");
var content = new FormUrlEncodedContent(new[]
{
new KeyValuePair<string, string>("x", x.ToString()),
new KeyValuePair<string, string>("y", y.ToString()),
new KeyValuePair<string, string>("color", color.ToString())
});
var response = await client.PostAsync("https://www.luogu.com.cn/paintBoard/paint", content);
if (!response.IsSuccessStatusCode)
{
FailureCount++;
Console.WriteLine($"使用账号 {Uid} 时 Http 状态码异常! 返回状态码: {response.StatusCode}");
Console.WriteLine(await response.Content.ReadAsStringAsync());
return false;
}
else
{
var res = await response.Content.ReadAsStringAsync();
if (res.Contains("200"))
{
Console.WriteLine($"使用账号 {Uid} 绘制成功");
return true;
}
else
{
Console.WriteLine($"使用账号 {Uid} 时洛谷返回了错误!");
Console.WriteLine(res);
return false;
}
}
}
catch (Exception ex)
{
Console.WriteLine($"使用账号 {Uid} 时出现了异常!");
Console.WriteLine(ex.Message);
FailureCount++;
return false;
}
}
}
class LuoguAccountComparer : IComparer<LuoguAccount>
{
int IComparer<LuoguAccount>.Compare(LuoguAccount x, LuoguAccount y)
{
return x.ReadyTime.CompareTo(y.ReadyTime);
}
}
}
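// Hedged usage sketch (illustration only, not part of the project): driving
// LuoguAccount directly. The cookie format is inferred from the regex in the
// constructor (it must contain "_uid=<digits>"); all values are placeholders.
//
// var account = new LuoguAccount("_uid=123456; __client_id=0123456789abcdef");
// string[] board = await account.GetBoard(); // one string of color chars per line
// bool ok = await account.Paint(10, 20, 3);  // paint palette color 3 at (10, 20)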
<|start_filename|>LuoguPaintboardPro/ImageProcessor.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Numerics;
namespace LuoguPaintboardPro
{
static class ImageProcessor
{
static readonly string[] BoardColorStrings = { "rgb(0, 0, 0)", "rgb(255, 255, 255)", "rgb(170, 170, 170)", "rgb(85, 85, 85)", "rgb(254, 211, 199)", "rgb(255, 196, 206)", "rgb(250, 172, 142)", "rgb(255, 139, 131)", "rgb(244, 67, 54)", "rgb(233, 30, 99)", "rgb(226, 102, 158)", "rgb(156, 39, 176)", "rgb(103, 58, 183)", "rgb(63, 81, 181)", "rgb(0, 70, 112)", "rgb(5, 113, 151)", "rgb(33, 150, 243)", "rgb(0, 188, 212)", "rgb(59, 229, 219)", "rgb(151, 253, 220)", "rgb(22, 115, 0)", "rgb(55, 169, 60)", "rgb(137, 230, 66)", "rgb(215, 255, 7)", "rgb(255, 246, 209)", "rgb(248, 203, 140)", "rgb(255, 235, 59)", "rgb(255, 193, 7)", "rgb(255, 152, 0)", "rgb(255, 87, 34)", "rgb(184, 63, 39)", "rgb(121, 85, 72)" };
static Color[] BoardColors = null;
static void prepareColors()
{
if (BoardColors == null)
{
BoardColors = new Color[BoardColorStrings.Length];
for (int i = 0; i < BoardColorStrings.Length; i++)
{
var s = BoardColorStrings[i][4..^1].Split(", ");
BoardColors[i] = Color.FromArgb(int.Parse(s[0]), int.Parse(s[1]), int.Parse(s[2]));
}
}
}
static int getClosestColorIndex(Vector3 color)
{
int result = 0;
float resultDis = float.PositiveInfinity;
for (int i = 0; i < BoardColors.Length; i++)
{
var cur = BoardColors[i];
float dis = (cur.R - color.X) * (cur.R - color.X) + (cur.G - color.Y) * (cur.G - color.Y) + (cur.B - color.Z) * (cur.B - color.Z);
if (dis < resultDis)
{
result = i;
resultDis = dis;
}
}
return result;
}
static char indexToChar(int index)
{
return index < 10 ? (char)('0' + index) : (char)('a' + index - 10);
}
public static void ProcessImage(string input, string output)
{
prepareColors();
Console.WriteLine($"输入文件: {input}");
if (string.IsNullOrEmpty(output)) output = "data.txt";
var image = new Bitmap(input);
var result = new char[image.Height, image.Width];
var preview = new Bitmap(image.Width, image.Height);
var vecImg = new Vector3[image.Height, image.Width];
Vector3 colorToVector(Color c) => new Vector3(c.R, c.G, c.B);
for (int x = 0; x < image.Width; x++)
{
for (int y = 0; y < image.Height; y++)
{
vecImg[y, x] = colorToVector(image.GetPixel(x, y));
}
}
for (int y = 0; y < image.Height; y++)
{
for (int x = 0; x < image.Width; x++)
{
var color = vecImg[y, x];
var newColorIndex = getClosestColorIndex(color);
result[y, x] = indexToChar(newColorIndex);
preview.SetPixel(x, y, BoardColors[newColorIndex]);
var newColor = colorToVector(BoardColors[newColorIndex]);
var error = color - newColor;
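// Floyd-Steinberg error diffusion: spread the quantization error over the
// not-yet-processed neighbours with the classic 7/16, 3/16, 5/16 and 1/16
// weights so the 32-color output preserves the perceived tones.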
if (x < image.Width - 1) vecImg[y, x + 1] += error * 7 / 16;
if (y < image.Height - 1) {
if (x > 0) vecImg[y + 1, x - 1] += error * 3 / 16;
vecImg[y + 1, x] += error * 5 / 16;
if (x < image.Width - 1) vecImg[y + 1, x + 1] += error * 1 / 16;
}
}
}
var outFile = new StreamWriter(output);
for (int i = 0; i < image.Height; i++)
{
for (int j = 0; j < image.Width; j++)
{
outFile.Write(result[i, j]);
}
outFile.Write(Environment.NewLine);
}
outFile.Flush();
outFile.Close();
preview.Save("preview.png");
Console.WriteLine($"图片处理完成, 输出文件 {output}");
}
}
}
<|start_filename|>LuoguPaintboardPro/PointToDraw.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Text;
namespace LuoguPaintboardPro
{
struct PointToDraw
{
public int X { get; set; }
public int Y { get; set; }
public int Color { get; set; }
public int R { get; set; }
static Random rand = new Random();
public PointToDraw(int x, int y, int color)
{
X = x;
Y = y;
Color = color;
R = rand.Next();
}
}
class PointToDrawComparer : IComparer<PointToDraw>
{
int IComparer<PointToDraw>.Compare(PointToDraw x, PointToDraw y)
{
// NOTE: change this to switch between drawing in order and drawing randomly
return x.R.CompareTo(y.R);
// return x.Y.CompareTo(y.Y) == 0 ? x.X.CompareTo(y.X) : x.Y.CompareTo(y.Y);
}
}
}
<|start_filename|>LuoguPaintboardPro/Program.cs<|end_filename|>
using System;
using Microsoft.Extensions.CommandLineUtils;
using System.IO;
namespace LuoguPaintboardPro
{
class Program
{
static void Main(string[] args)
{
var app = new CommandLineApplication();
app.HelpOption("-h|--help");
app.VersionOption("-v|--version", System.Reflection.Assembly.GetExecutingAssembly().GetName().Version.ToString());
app.Name = "LuoguPaintboardPro";
app.Description = "洛谷冬日滑板辅助器增强版.";
app.OnExecute(() =>
{
app.ShowHelp();
return 0;
});
app.Command("genpic", command =>
{
command.Description = "将彩色图片处理为 32 色图.";
command.HelpOption("-h|--help");
var inputFileArg = command.Argument("inputFile", "输入的 png 图像.");
var outputFileArg = command.Argument("[outputFile]", "输出文件.");
command.OnExecute(() =>
{
ImageProcessor.ProcessImage(inputFileArg.Value, outputFileArg.Value);
return 0;
});
});
app.Command("draw", command =>
{
command.Description = "在指定坐标绘制图片, 并监视保护.";
command.HelpOption("-h|--help");
var sXArg = command.Argument("xpos", "绘制图片位置的 x 坐标");
var sYArg = command.Argument("ypos", "绘制图片位置的 Y 坐标");
var imageFileArg = command.Argument("image", "由 genpic 生成的代表图片的 txt");
var cookieFileArg = command.Argument("cookie", "包含要使用的 cookie 的 txt 文件, 一行一个 cookie.");
command.OnExecute(() =>
{
int sx = int.Parse(sXArg.Value);
int sy = int.Parse(sYArg.Value);
var image = File.ReadAllText(imageFileArg.Value ?? "data.txt").Split('\n');
var cookie = File.ReadAllText(cookieFileArg.Value ?? "cookie.txt").Split('\n');
int w = 0, h = 0;
for (int i = 0; i < image.Length; i++)
{
image[i] = image[i].Trim();
if (!string.IsNullOrWhiteSpace(image[i]))
{
h++;
w = Math.Max(w, image[i].Length);
}
}
Console.WriteLine($"读取到图片 {imageFileArg.Value ?? "data.txt"}, 宽 {w}, 高 {h}");
var imageArray = new char[h, w];
for (int i = 0; i < h; i++)
{
for (int j = 0; j < w; j++)
{
imageArray[i, j] = image[i][j];
}
}
if (sy + h > 600 || sx + w > 1000) {
Console.WriteLine("坐标超出范围了!");
return 1;
}
var opr = new PaintboardOperator(cookie);
var task = opr.Work(imageArray, w, h, sx, sy);
Console.CancelKeyPress += (s, args) =>
{
Console.WriteLine("已终止程序.");
};
task.Wait();
return 0;
});
});
app.Execute(args);
}
}
}
<|start_filename|>LuoguPaintboardPro/Properties/launchSettings.json<|end_filename|>
{
"profiles": {
"LuoguPaintboardPro": {
"commandName": "Project",
"commandLineArgs": "draw 486 156",
"workingDirectory": "C:\\Users\\xiaog\\source\\repos\\LuoguPaintboardPro\\LuoguPaintboardPro"
}
}
}
<|start_filename|>LuoguPaintboardPro/PaintboardOperator.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Text;
using System.Web;
namespace LuoguPaintboardPro
{
class PaintboardOperator
{
PriorityQueue<PointToDraw> pointQueue = new PriorityQueue<PointToDraw>(new PointToDrawComparer());
Queue<LuoguAccount> accountQueue = new Queue<LuoguAccount>();
public PaintboardOperator(string[] cookies)
{
foreach (var i in cookies)
{
if (string.IsNullOrWhiteSpace(i)) continue;
accountQueue.Enqueue(new LuoguAccount(i.Trim()));
}
Console.WriteLine($"识别到 {accountQueue.Count} 个 cookie" );
}
int CharToColorIndex(char ch)
{
return ch <= '9' ? ch - '0' : ch - 'a' + 10;
}
void RefreshPointQueue(char[,] image, int w, int h, int sx, int sy)
{
Console.WriteLine($"正在刷新画板");
try
{
var newQueue = new PriorityQueue<PointToDraw>(new PointToDrawComparer());
var currentBoard = accountQueue.Peek().GetBoard().Result;
for (int i = 0; i < h; i++)
{
for (int j = 0; j < w; j++)
{
if (image[i, j] == '_') continue;
int x = sx + j;
int y = sy + i;
if (currentBoard[x][y] != image[i, j])
{
newQueue.Push(new PointToDraw(x, y, CharToColorIndex(image[i, j])));
}
}
}
pointQueue = newQueue;
}
catch (Exception ex)
{
Console.WriteLine("出现了异常!");
Console.WriteLine(ex.Message);
}
}
static readonly TimeSpan CoolDownTime = new TimeSpan(0, 0, 31);
static readonly TimeSpan NetworkCoolDownTime = new TimeSpan(0, 0, 0, 0, 100);
static readonly DateTime FinishTime = new DateTime(2021, 1, 4, 0, 0, 0);
public static int TotalPointsDrawn { get; set; } = 0;
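// Main drawing loop: diff the target image against the live board, then pop
// points (in the order defined by PointToDrawComparer) and paint each one
// with the next account from the round-robin queue, waiting out that
// account's cooldown first. The board is re-fetched every 100 painted points
// so vandalised pixels get queued again.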
public async Task Work(char[,] image, int w, int h, int sx, int sy)
{
Console.WriteLine($"正在开始绘制, 预计用时 {CoolDownTime * (w * h / accountQueue.Count)}");
while (true)
{
RefreshPointQueue(image, w, h, sx, sy);
if (pointQueue.Count == 0)
{
if (FinishTime - DateTime.Now >= new TimeSpan(0, 5, 0))
{
Console.WriteLine("已全部绘制完成, 每隔 30 秒检测一次破坏情况");
Task.Delay(new TimeSpan(0, 0, 30)).Wait();
}
else
{
Console.WriteLine("最后五分钟, 高密度检测");
Task.Delay(new TimeSpan(0, 0, 1)).Wait();
}
continue;
}
while (pointQueue.Count > 0)
{
var point = pointQueue.Pop();
bool ok;
do
{
Task.Delay(NetworkCoolDownTime).Wait();
var cur = accountQueue.Dequeue();
if (DateTime.Now > FinishTime)
{
Console.WriteLine("活动已结束.");
return;
}
if (DateTime.Now < cur.ReadyTime)
{
await Task.Delay(cur.ReadyTime - DateTime.Now);
}
ok = await cur.Paint(point.X, point.Y, point.Color);
cur.ReadyTime += CoolDownTime;
accountQueue.Enqueue(cur);
} while (!ok);
TotalPointsDrawn++;
if (TotalPointsDrawn % 100 == 0) RefreshPointQueue(image, w, h, sx, sy);
}
}
}
}
}
<|start_filename|>LuoguPaintboardPro/paintboard.js<|end_filename|>
H = 400;
W = 800;
nowcolor = 0;
scale = 5;
dragged = 0;
lasttime = 0;
timelimit = 10;
colorlist = ['rgb(0, 0, 0)', 'rgb(255, 255, 255)', 'rgb(170, 170, 170)', 'rgb(85, 85, 85)', 'rgb(254, 211, 199)', 'rgb(255, 196, 206)', 'rgb(250, 172, 142)', 'rgb(255, 139, 131)', 'rgb(244, 67, 54)', 'rgb(233, 30, 99)', 'rgb(226, 102, 158)', 'rgb(156, 39, 176)', 'rgb(103, 58, 183)', 'rgb(63, 81, 181)', 'rgb(0, 70, 112)', 'rgb(5, 113, 151)', 'rgb(33, 150, 243)', 'rgb(0, 188, 212)', 'rgb(59, 229, 219)', 'rgb(151, 253, 220)', 'rgb(22, 115, 0)', 'rgb(55, 169, 60)', 'rgb(137, 230, 66)', 'rgb(215, 255, 7)', 'rgb(255, 246, 209)', 'rgb(248, 203, 140)', 'rgb(255, 235, 59)', 'rgb(255, 193, 7)', 'rgb(255, 152, 0)', 'rgb(255, 87, 34)', 'rgb(184, 63, 39)', 'rgb(121, 85, 72)'];
nowintevel = 0;
var myarr = [];
for (var i = 0; i < H; i++) {
myarr[i] = [];
for (var j = 0; j < W; j++) {
myarr[i][j] = '#dddddd';
}
}
function render(arr) {
var c = document.getElementById("mycanvas");
var ctx = c.getContext("2d");
for (var i = 0; i < H; i++) {
for (var j = 0; j < W; j++) {
ctx.fillStyle = arr[i][j];
ctx.fillRect(j * scale, i * scale, scale, scale);
}
}
}
function update(y, x, color) {
if (dragged) {
dragged = 0;
return;
}
//alert('ss');
var c = document.getElementById("mycanvas");
var ctx = c.getContext("2d");
ctx.save();
ctx.fillStyle = color;
ctx.fillRect(x * 5, y * 5, 5, 5);
}
function initpale() {
$('#palette').html('');
colorlist.forEach(function (k, v) {
console.log(k, v);
$('#palette').append('<div class="paleitem" data-cid=' + v + '></div>');
$('[data-cid=' + v + ']').css('background', k);
});
zoom(1)
}
binditem = function () {
$('.paleitem').removeClass("selected");
$(this).addClass("selected");
nowcolor = $(this).attr('data-cid');
}
zoom = function (s) {
scale = s;
$('#mycanvas').width(800 * scale)
if (s == 1) {
$('#mycanvas').css('top', 0);
$('#mycanvas').css('left', 0);
}
}
$("[zoom]").click(function () {
zoom($(this).attr('zoom'));
});
myarr[10][10] = '#6600ff';
myarr[100][200] = '#66ccff';
render(myarr);
initpale();
$('.paleitem').bind("click", binditem);
$('[data-cid=0]').addClass("selected");
$('#mycanvas').bind("click", function () {
//alert(event.offsetY);
if (new Date() < (lasttime + timelimit) * 1000) {
alert("冷却时间未到,暂时不能涂色");
return;
}
var x = parseInt(event.offsetX / scale);
var y = parseInt(event.offsetY / scale);
update(
y,
x,
colorlist[nowcolor]
);
$.post("/paintBoard/paint", {
x: x,
y: y,
color: nowcolor
}, function (resp) {
if (resp.status !== 200) {
alert(resp.data)
} else {
lasttime = (new Date()) / 1000;
getCountDown(lasttime + timelimit);
}
});
})
$('#mycanvas').draggable({
cursor: "move",
stop: function () {
dragged = 1;
}
});
$('#mycanvas').bind("mousewheel", function (event, delta) {
var delta = event.originalEvent.deltaY;
var y = parseInt(event.offsetY / scale);
var x = parseInt(event.offsetX / scale);
console.log(event);
if (delta > 0) {
if (scale == 10)
zoom(5);
else if (scale == 5)
zoom(1);
} else {
if (scale == 1)
zoom(5);
else if (scale == 5)
zoom(10);
}
if (scale != 1) {
$('#mycanvas').css('top', -y * scale + 200);
$('#mycanvas').css('left', -x * scale + 400);
}
return false;
});
function getCountDown(timestamp) {
clearInterval(nowintevel);
nowintevel = setInterval(function () {
var nowTime = new Date();
var endTime = new Date(timestamp * 1000);
var t = endTime.getTime() - nowTime.getTime();
if (t < 0) {
$("#timeleft").html("冷却时间到");
clearInterval(nowintevel);
return;
}
var hour = Math.floor(t / 1000 / 60 / 60 % 24);
var min = Math.floor(t / 1000 / 60 % 60);
var sec = Math.floor(t / 1000 % 60);
if (hour < 10)
hour = "0" + hour;
if (min < 10)
min = "0" + min;
if (sec < 10)
sec = "0" + sec;
var countDownTime = hour + ":" + min + ":" + sec;
$("#timeleft").html(countDownTime);
}, 1000);
}
getCountDown(lasttime + timelimit);
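// The /paintBoard/board endpoint returns the whole board as plain text: one
// line per outer index (x below), each character (y below) encoding a palette
// index as a single base-32 digit, decoded via parseInt(color, 32).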
function initialPaint() {
$.get("/paintBoard/board", function (resp) {
resp.split('\n').map(function (colorStr, x) {
colorStr.split("").map(function (color, y) {
//if(color !== '2') console.log(x, y, color);
update(y, x, colorlist[parseInt(color, 32)]);
});
});
});
}
var ws = null;
function connectWs() {
try {
ws = new WebSocket('wss://ws.luogu.com.cn/ws');
} catch (e) {
alert("无法连接追踪服务器");
return;
}
ws.onopen = function () {
var message = {
"type": "join_channel",
"channel": "paintboard",
"channel_param": ""
};
ws.send(JSON.stringify(message));
};
ws.onmessage = function (event) {
var data = JSON.parse(event.data);
if (data.type === "paintboard_update") {
update(data.y, data.x, colorlist[data.color]);
} else if (data.type === "result") {
initialPaint()
}
};
}
connectWs(); | Duanyll/LuoguPaintboardPro |
<|start_filename|>Makefile<|end_filename|>
.PHONY : playground install test gitlab github publish
install:
rm -f yarn.lock || true
yarn install
test:
yarn test
playground:
yarn playground
gitlab:
$(MAKE) test
git add .
git status
git commit -m"[sync]"|| true
git push origin master
github:
$(MAKE) test
git add .
git status
git commit -m"[sync]"|| true
git push github master
publish:
$(MAKE) gitlab
$(MAKE) github
npm publish
<|start_filename|>src/main.js<|end_filename|>
module.exports = require("./composeWithMysql")
<|start_filename|>tests/limit.test.js<|end_filename|>
const GraphQL = require("graphql")
const { composeWithMysql } = require("../src/main")
describe("Limit test", () => {
jest.setTimeout(30000)
test("get only the first 2 employees with only _limit parameter", () => {
return composeWithMysql({
mysqlConfig: {
//debug: ['ComQueryPacket'],
//connectionLimit: 100,
prefix: "emp_",
host: "localhost",
port: 3306,
user: "root",
password: "<PASSWORD>",
database: "employees",
},
}).then(employeesSchema => {
const gqlQuery = `
{
employees (_limit: 2) {
emp_no
first_name
last_name
gender
birth_date
hire_date
}
}`
return GraphQL.graphql({
schema: employeesSchema,
source: gqlQuery,
variableValues: {},
contextValue: {}
}).then(gqlResponse => {
expect(gqlResponse).toMatchObject({
"data": {
"employees": [{
"birth_date": "1953-09-01T23:00:00.000Z",
"emp_no": 10001,
"first_name": "Georgi",
"gender": "M",
"hire_date": "1986-06-25T22:00:00.000Z",
"last_name": "Facello"
},
{
"birth_date": "1964-06-01T23:00:00.000Z",
"emp_no": 10002,
"first_name": "Bezalel",
"gender": "F",
"hire_date": "1985-11-20T23:00:00.000Z",
"last_name": "Simmel"
}]
}
}
)
})
})
})
test("get only the first 2 employees with _limit and another parameter", () => {
return composeWithMysql({
mysqlConfig: {
//debug: ['ComQueryPacket'],
//connectionLimit: 100,
prefix: "emp_",
host: "localhost",
port: 3306,
user: "root",
password: "<PASSWORD>",
database: "employees",
},
}).then(employeesSchema => {
const gqlQuery = `
{
employees (_limit: 2, gender: "F") {
emp_no
first_name
last_name
gender
birth_date
hire_date
}
}`
return GraphQL.graphql({
schema: employeesSchema,
source: gqlQuery,
variableValues: {},
contextValue: {}
}).then(gqlResponse => {
expect(gqlResponse).toMatchObject({
"data": {
"employees": [{
"birth_date": "1964-06-01T23:00:00.000Z",
"emp_no": 10002,
"first_name": "Bezalel",
"gender": "F",
"hire_date": "1985-11-20T23:00:00.000Z",
"last_name": "Simmel"
},
{
"birth_date": "1953-04-19T23:00:00.000Z",
"emp_no": 10006,
"first_name": "Anneke",
"gender": "F",
"hire_date": "1989-06-01T22:00:00.000Z",
"last_name": "Preusig"
}]
}
})
})
})
})
})
<|start_filename|>.vscode/settings.json<|end_filename|>
{
"files.autoSave": "onFocusChange",
"prettier.printWidth": 120,
"prettier.tabWidth": 4,
"prettier.useTabs": true,
"prettier.endOfLine": "lf",
"prettier.semi": false
}
<|start_filename|>src/composeWithMysql.js<|end_filename|>
"use strict"
const debug = require("debug")("graphql-compose-mysql")
const mysql = require("mysql")
const mysqlUtilities = require("mysql-utilities")
const { Resolver, SchemaComposer, getProjectionFromAST, clearName } = require("graphql-compose")
const DataLoader = require('dataloader')
const md5 = require('md5')
module.exports = (() => {
let PREFIX = ""
const typeMap = { // TODO: Add spatial type ???
bigint: "Int",
binary: "String",
bit: "Int",
blob: "String",
bool: "Boolean",
boolean: "Boolean",
char: "String",
date: "Date",
datetime: "Date",
dec: "Float",
decimal: "Float",
double: "Float",
enum: "String", // TODO: Use GraphQL Enum type !
float: "Float",
int: "Int",
integer: "Int",
json: "JSON",
longblob: "String",
longtext: "String",
mediumblob: "String",
mediumint: "Int",
mediumtext: "String",
numeric: "Float",
set: "String", // TODO: Use GraphQL Enum/Union type ???
smallint: "Int",
text: "String",
time: "Date",
timestamp: "Date",
tinyblob: "String",
tinyint: "Int",
tinytext: "String",
varbinary: "String",
varchar: "String",
year: "Int",
}
const _clearNameForField = (name) => {
return `${clearName(name)}`
}
const _clearNameForResolver = (name) => {
return `${PREFIX}${clearName(name)}`
}
const _clearNameForType = (name) => {
return `${PREFIX}${clearName(name)}T`
}
const _getMysqlTablesNames = (mysqlConnection) => {
return new Promise((resolve, reject) => {
mysqlConnection.tables(
(err, tables) => !!err ? reject(err) : resolve(Object.keys(tables))
)
})
}
const _getForeignFields = (mysqlConnection, mysqlTableName) => {
return new Promise((resolve, reject) => {
mysqlConnection.foreign(
mysqlTableName,
(err, foreignFields) => {
if (!!err)
reject(err)
else {
resolve(Object.values(foreignFields).map(field => {
return {
columnName: field.COLUMN_NAME,
referencedTableName: field.REFERENCED_TABLE_NAME,
referencedColumnName: field.REFERENCED_COLUMN_NAME
}
}))
}
}
)
})
}
const _retrieveTableFields = (mysqlConnection, mysqlTableName) => {
return new Promise((resolve, reject) => {
mysqlConnection.fields(
mysqlTableName,
(err, fields) => !!err ? reject(err) : resolve(fields)
)
})
}
const _mysqlTypeToGqlType = (mysqlType) => {
const extractBaseType = RegExp("^(\\w+)\\W*", "g")
const baseType = extractBaseType.exec(mysqlType)[1]
const gqlType = typeMap[baseType]
if (!gqlType) {
throw new Error(`No type mapping found for MySQL type ${mysqlType} !`)
}
return gqlType
}
const _buildGqlFieldsFromMysqlTable = async (mysqlConnection, mysqlTableName) => {
const fieldsMap = await _retrieveTableFields(mysqlConnection, mysqlTableName)
const fields = {}
Object.keys(fieldsMap).forEach(field => {
const fieldName = _clearNameForField(fieldsMap[field].Field)
fields[fieldName] = _mysqlTypeToGqlType(fieldsMap[field].Type)
})
return fields
}
const _addKnexInContext = (ns, mysqlConfig) => {
if (!ns.knex) { // initialize the connection pool
ns.knex = require('knex')({
client: "mysql",
connection: Object.assign(mysqlConfig, { multipleStatements: true })
})
}
}
const _buildSelectArgs = (flatProjection) => {
const selectArgs = Object.keys(flatProjection)
return selectArgs
}
const _buildProjectionFromInfo = (info) => {
const projection = Object.entries(getProjectionFromAST(info))
// [ 'emp_no', {} ] or [ 'emp_no', true ] or [ 'departments', { dept_no: {} } ]
// Keep only the scalar field ie. no sub-object ie. either "{}"" or "true" for value
.filter(entry => Object.values(entry[1]).length == 0)
.reduce((acc, entry) => Object.assign(acc, { [entry[0]]: true }), {})
return projection
}
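/**
 * One DataLoader per (table, projection) pair: all .load(args) calls issued
 * in the same event-loop tick are batched into a single multi-statement SQL
 * round-trip (the generated SELECTs are joined with ";"), which is why the
 * knex connection is created with multipleStatements: true.
 */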
const _addDataLoaderForProjectionInContext = (ns, loaderName, mysqlTableName, projection) => {
if (!ns[loaderName]) { // if needed, create a new dataloader for the current projection
ns[loaderName] = new DataLoader(argsList => {
if (argsList.length > 1) {
const selects = []
const bindings = []
argsList.map(_args => {
const limit = _args._limit
let { _limit, ...args } = _args
let statements = ns.knex(mysqlTableName)
.select(_buildSelectArgs(projection))
.where(args)
if (!!limit) {
statements.limit(limit)
}
statements = statements.toSQL().toNative()
selects.push(statements.sql)
bindings.push(statements.bindings)
})
return ns.knex
.raw(selects.join(";"), [].concat.apply([], bindings))
.then(rows => rows[0])
} else { // argList == 1
const limit = argsList[0]._limit
let { _limit, ...args } = argsList[0]
let statement = ns.knex(mysqlTableName)
.select(_buildSelectArgs(projection))
.where(args)
if (!!limit) {
statement.limit(limit)
}
return statement.then(rows => [rows])
}
}, {
/**
* TODO: How to handle the cache ?
* Should be "one query - one cache"
* but where to hook when a query end in order to clear the cache ?
* the "info" object could be a way...
*/
cache: false
})
}
}
const _buildResolverForGqlType = (mysqlConfig, mysqlTableName, gqlType) => {
return new Resolver({
name: [_clearNameForResolver(mysqlTableName)],
type: [gqlType],
args: Object.assign({ _limit: "Int" }, gqlType.getFields()),
resolve: ({ source, args, context, info }) => {
if (!context) {
throw new Error("You must provide a GraphQL context to the server, even if empty (e.g. contextValue: {})")
}
/**
* Use a namespace specific to the current mysqlConfig to avoid collisions in context
*/
const namespace = `gqlComposeMysql${md5(JSON.stringify(mysqlConfig))}`
context[namespace] = !context[namespace] ? {} : context[namespace]
_addKnexInContext(context[namespace], mysqlConfig)
const projection = _buildProjectionFromInfo(info)
const projectionHash = md5(JSON.stringify(projection))
const loaderName = `${_clearNameForResolver(mysqlTableName)}${projectionHash}`
_addDataLoaderForProjectionInContext(context[namespace], loaderName, mysqlTableName, projection)
return context[namespace][loaderName].load(args)
}
})
}
// public interfaces
return {
composeWithMysql: async (opts) => {
if (!opts) {
throw new Error("You must provide arguments when calling composeWithMysql()")
}
if (!opts.mysqlConfig) {
throw new Error("You must provide a 'mysqlConfig' argument for the database.")
}
if (!opts.prefix) {
opts.prefix = ""
}
PREFIX = Object.freeze(opts.prefix)
// TODO optimize schema creation (use a pool instead of a single connection ?)
const mysqlConnection = mysql.createConnection(opts.mysqlConfig)
// Mix-in for Data Access Methods and SQL Autogenerating Methods
mysqlUtilities.upgrade(mysqlConnection)
// Mix-in for Introspection Methods
mysqlUtilities.introspection(mysqlConnection)
// initialize the graphQL schema
const schemaComposer = new SchemaComposer()
const mysqlTablesNames = await _getMysqlTablesNames(mysqlConnection)
return Promise.all(mysqlTablesNames.map(async mysqlTableName => {
// initialize the graphql type built from the mysql table
const gqlTC = schemaComposer.TypeComposer.create({
name: _clearNameForType(mysqlTableName),
})
// add local fields
const fields = await _buildGqlFieldsFromMysqlTable(mysqlConnection, mysqlTableName)
gqlTC.addFields(fields)
// add local resolver
const resolver = _buildResolverForGqlType(opts.mysqlConfig, mysqlTableName, gqlTC)
schemaComposer.Query.addFields({ [resolver.name]: resolver })
})).then(_ => {
return Promise.all(mysqlTablesNames.map(async mysqlTableName => {
const foreignFields = await _getForeignFields(mysqlConnection, mysqlTableName)
/**
* [ { columnName: 'dept_no',
referencedTableName: 'departments',
referencedColumnName: 'dept_no' },
{ columnName: 'emp_no',
referencedTableName: 'employees',
referencedColumnName: 'emp_no' } ]
*/
// add foreign fields
foreignFields.forEach(foreignField => {
const localTC = schemaComposer.get(_clearNameForType(mysqlTableName))
const foreignResolver = schemaComposer.Query.getField(_clearNameForResolver(foreignField.referencedTableName))
localTC.addRelation(
_clearNameForField(foreignField.referencedTableName),
{
resolver: () => foreignResolver,
prepareArgs: {
[_clearNameForField(foreignField.referencedColumnName)]: source => {
return source[_clearNameForField(foreignField.columnName)]
},
},
projection: { [_clearNameForField(foreignField.columnName)]: true }
}
)
})
})).then(_ => {
mysqlConnection.end()
// build the graphQL schema
const gqlSchema = schemaComposer.buildSchema()
//console.log(printSchema(gqlSchema))
return gqlSchema
})
})
}
}
})()
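/**
 * Hedged usage sketch (illustration only, not shipped with this module):
 * serving the generated schema over HTTP. `express` and `express-graphql`
 * (with its `graphqlHTTP` named export) are assumed dependencies; the mysql
 * credentials are placeholders.
 *
 *   const express = require("express")
 *   const { graphqlHTTP } = require("express-graphql")
 *   const { composeWithMysql } = require("./composeWithMysql")
 *
 *   composeWithMysql({
 *       mysqlConfig: { host: "localhost", port: 3306, user: "root", password: "secret", database: "employees" },
 *   }).then(schema => {
 *       const app = express()
 *       app.use("/graphql", graphqlHTTP({ schema, graphiql: true, context: {} }))
 *       app.listen(4000)
 *   })
 */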
<|start_filename|>tests/basic.test.js<|end_filename|>
const GraphQL = require("graphql")
const { composeWithMysql } = require("../src/main")
describe("Basic test", () => {
jest.setTimeout(30000)
test("get all fields for employee n°10001", () => {
return composeWithMysql({
mysqlConfig: {
//debug: ['ComQueryPacket'],
//connectionLimit: 100,
prefix: "emp_",
host: "localhost",
port: 3306,
user: "root",
password: "<PASSWORD>",
database: "employees",
},
}).then(employeesSchema => {
const gqlQuery = `
{
employees(emp_no: 10001) {
emp_no
first_name
last_name
gender
birth_date
hire_date
}
}`
return GraphQL.graphql({
schema: employeesSchema,
source: gqlQuery,
variableValues: {},
contextValue: {}
}).then(gqlResponse => {
expect(gqlResponse).toMatchObject({
data: {
employees: [{
emp_no: 10001,
first_name: "Georgi",
last_name: "Facello",
gender: "M",
birth_date: "1953-09-01T23:00:00.000Z",
hire_date: "1986-06-25T22:00:00.000Z"
}]
}
})
})
})
})
test("get some fields from some employees multiples times (dataloader test)", () => {
return composeWithMysql({
mysqlConfig: {
host: "localhost",
port: 3306,
user: "root",
password: "<PASSWORD>",
database: "employees",
},
}).then(employeesSchema => {
const gqlQuery = `
{
emp_10001: employees(emp_no: 10001) {
last_name
gender
birth_date
hire_date
}
emp_10001bis: employees(emp_no: 10001) {
last_name
gender
birth_date
hire_date
}
emp_10001ter: employees(emp_no: 10001, gender:"M") {
last_name
gender
birth_date
hire_date
}
emp_10002: employees(emp_no: 10002, gender:"F") {
last_name
gender
birth_date
hire_date
}
emp_10003: employees(emp_no: 10003) {
emp_no
first_name
last_name
gender
birth_date
hire_date
}
emp_10001c: employees(emp_no: 10001) {
last_name
gender
birth_date
hire_date
}
emp_10001d: employees(emp_no: 10001) {
last_name
gender
birth_date
hire_date
}
}`
return GraphQL.graphql({
schema: employeesSchema,
source: gqlQuery,
variableValues: {},
contextValue: {}
}).then(gqlResponse => {
expect(gqlResponse).toMatchObject({
data: {
emp_10001: [
{
last_name: "Facello",
gender: "M",
birth_date: "1953-09-01T23:00:00.000Z",
hire_date: "1986-06-25T22:00:00.000Z"
}
],
emp_10001bis: [
{
last_name: "Facello",
gender: "M",
birth_date: "1953-09-01T23:00:00.000Z",
hire_date: "1986-06-25T22:00:00.000Z"
}
],
emp_10001ter: [
{
last_name: "Facello",
gender: "M",
birth_date: "1953-09-01T23:00:00.000Z",
hire_date: "1986-06-25T22:00:00.000Z"
}
],
emp_10002: [
{
"last_name": "Simmel",
"gender": "F",
"birth_date": "1964-06-01T23:00:00.000Z",
"hire_date": "1985-11-20T23:00:00.000Z"
}
],
emp_10003: [
{
"emp_no": 10003,
"first_name": "Parto",
"last_name": "Bamford",
"gender": "M",
"birth_date": "1959-12-02T23:00:00.000Z",
"hire_date": "1986-08-27T22:00:00.000Z"
}
],
emp_10001c: [
{
last_name: "Facello",
gender: "M",
birth_date: "1953-09-01T23:00:00.000Z",
hire_date: "1986-06-25T22:00:00.000Z"
}
],
emp_10001d: [
{
last_name: "Facello",
gender: "M",
birth_date: "1953-09-01T23:00:00.000Z",
hire_date: "1986-06-25T22:00:00.000Z"
}
]
}
})
})
})
})
})
<|start_filename|>tests/join.test.js<|end_filename|>
const debug = require("debug")("graphql-compose-mysql")
const GraphQL = require("graphql")
const { composeWithMysql } = require("../src/main")
describe("Join test", () => {
jest.setTimeout(30000)
test("get dept_manager with join on employees and departments", () => {
return composeWithMysql({
mysqlConfig: {
//connectionLimit: 100,
host: "localhost",
port: 3306,
user: "root",
password: "<PASSWORD>",
database: "employees",
},
}).then(employeesSchema => {
const gqlQuery = `
{
dept_manager(dept_no: "d003") {
from_date
employees {
emp_no
last_name
}
departments {
dept_name
}
}
}`
return GraphQL.graphql({
schema: employeesSchema,
source: gqlQuery,
variableValues: {},
contextValue: {}
}).then(gqlResponse => {
expect(gqlResponse).toMatchObject(
{
"data": {
"dept_manager": [
{
"from_date": "1984-12-31T23:00:00.000Z",
"employees": [
{
"emp_no": 110183,
"last_name": "Ossenbruggen"
}
],
"departments": [
{
"dept_name": "Human Resources"
}
]
},
{
"from_date": "1992-03-20T23:00:00.000Z",
"employees": [
{
"emp_no": 110228,
"last_name": "Sigstam"
}
],
"departments": [
{
"dept_name": "Human Resources"
}
]
}
]
}
}
)
})
})
})
}) | thejibz/graphql-compose-mysql |
<|start_filename|>lib/convert.js<|end_filename|>
'use strict';
/**
* Convert units
*/
const convert = {
celsiusToFahrenheit: function(celsius) {
return celsius * 1.8 + 32;
},
feetToMeters: function(feet) {
return feet * 0.3048;
},
milesToMeters: function(miles) {
return miles * 1609.344;
},
metersToMiles: function(meters) {
return meters / 1609.344;
},
inhgToKpa: function(inHg) {
return inHg / 0.29529988;
},
kpaToInhg: function(kpa) {
return kpa * 0.29529988;
},
kphToMps: function(kph) {
return kph / 3600 * 1000;
},
mpsToKts: function(mps) {
return mps * 1.9438445;
},
ktsToMps: function(kts) {
return kts / 1.9438445;
}
};
module.exports = convert;
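// Example (illustrative): 10 statute miles of visibility expressed in meters:
// convert.milesToMeters(10) === 16093.44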
<|start_filename|>test/metar-parser.js<|end_filename|>
'use strict';
const assert = require('assert');
const metarParser = require('../lib/metar-parser');
const convert = require('../lib/convert');
describe('metarParser', function() {
const metarTestcases = [
{
source: 'https://aviationweather.gov/metar#1',
metarCode: "KEYW 041053Z AUTO 13005KT 10SM CLR 24/22 A3000 RMK AO2 SLP159 T02440222",
expectedValues: [
['icao', 'KEYW'],
['wind', {degrees: 130, speed_kts: 5, gust_kts: 5}],
['visibility', { meters: "16000", meters_float: convert.milesToMeters(10) }],
['temperature', { celsius: 24, fahrenheit: 75.2 }],
['barometer', {kpa: 3000 / 10 / 2.9529988 }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'https://aviationweather.gov/metar#2',
metarCode: "KACV 041053Z AUTO 07003KT 10SM CLR 04/04 A3001 RMK AO2 SLP169 T00440039",
expectedValues: [
['icao', 'KACV'],
['wind', {degrees: 70, speed_kts: 3, gust_kts: 3}],
['visibility', { meters: "16000", meters_float: convert.milesToMeters(10) }],
['temperature', { celsius: 4, fahrenheit: 39.2 }],
['barometer', { hg: 30.01, kpa: convert.inhgToKpa(30.01) }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'https://api.checkwx.com/#1',
metarCode: "KPIE 260853Z AUTO 02013G17KT 10SM CLR 17/07 A2998 RMK AO2 SLP153 T01720072 57000",
expectedValues: [
['icao', 'KPIE'],
['wind', {degrees: 20, speed_kts: 13, gust_kts: 17}],
['visibility', { meters: "16000", meters_float: convert.milesToMeters(10) }],
['temperature', { celsius: 17, fahrenheit: 62.6 }],
['barometer', { hg: 29.98, kpa: convert.inhgToKpa(29.98) }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'https://api.checkwx.com/#2',
metarCode: "KSPG 260853Z AUTO 05012KT 10SM CLR 18/09 A2997 RMK AO2 SLP148 T01830094 53001",
expectedValues: [
['icao', 'KSPG'],
['wind', {degrees: 50, speed_kts: 12, gust_kts: 12}],
['visibility', { meters: "16000", meters_float: convert.milesToMeters(10) }],
['temperature', { celsius: 18, fahrenheit: 64.4 }],
['barometer', {kpa: 2997 / 10 / 2.9529988 }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'https://de.wikipedia.org/wiki/METAR',
metarCode: "EDDS 081620Z 29010KT 9999 FEW040TCU 09/M03 Q1012 NOSIG",
expectedValues: [
['icao', 'EDDS'],
['wind', {
degrees: 290,
speed_kts: 10,
speed_mps: convert.ktsToMps(10),
gust_kts: 10,
gust_mps: convert.ktsToMps(10)
}],
['visibility', { meters: "10000", meters_float: 9999 }],
['clouds', [{base_feet_agl: 4000, code: 'FEW'}]],
['temperature', { celsius: 9, fahrenheit: 48.2 }],
['barometer', {kpa: 101.2 }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'https://en.wikipedia.org/wiki/METAR#1',
metarCode: "METAR LBBG 041600Z 12012MPS 090V150 1400 R04/P1500N R22/P1500U +SN BKN022 OVC050 M04/M07 Q1020 NOSIG 8849//91=",
expectedValues: [
['icao', 'LBBG'],
['wind', {
degrees: 120,
speed_kts: convert.mpsToKts(12),
speed_mps: 12,
gust_kts: convert.mpsToKts(12),
gust_mps: 12
}],
['visibility', { meters: "1500", meters_float: 1400 }],
['conditions', [ {code: '+'}, {code: 'SN'} ]],
['clouds', [{base_feet_agl: 2200, code: 'BKN'}, {base_feet_agl: 5000, code: 'OVC'}]],
['temperature', { celsius: -4, fahrenheit: 24.8 }],
['barometer', {kpa: 102.0 }],
['flight_category', 'LIFR']
],
output: false
},
{
source: 'https://en.wikipedia.org/wiki/METAR#2',
metarCode: "METAR KTTN 051853Z 04011KT 1/2SM VCTS SN FZFG BKN003 OVC010 M02/M02 A3006 RMK AO2 TSB40 SLP176 P0002 T10171017=",
expectedValues: [
['icao', 'KTTN'],
['wind', {degrees: 40, speed_kts: 11, gust_kts: 11}],
['visibility', { meters: "1000", meters_float: convert.milesToMeters(0.5) }],
['conditions', [ {code: 'VC'}, {code: 'TS'}, {code: 'SN'}, {code: 'FZ'}, {code: 'FG'} ]],
['clouds', [{base_feet_agl: 300, code: 'BKN'}, {base_feet_agl: 1000, code: 'OVC'}]],
['temperature', { celsius: -2, fahrenheit: 28.4 }],
['barometer', {kpa: convert.inhgToKpa(30.06) }],
['flight_category', 'LIFR']
],
output: false
},
{
source: 'https://en.allmetsat.com/metar-taf/#1',
metarCode: "KEYW 041053Z AUTO 13005KT 10SM CLR 24/22 A3000 RMK AO2 SLP159 T02440222",
expectedValues: [
['icao', 'KEYW'],
['wind', {degrees: 130, speed_kts: 5, gust_kts: 5}],
['visibility', { meters: "16000", meters_float: convert.milesToMeters(10) }],
['temperature', { celsius: 24 }],
['barometer', {kpa: 3000 / 10 / 2.9529988 }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'https://en.allmetsat.com/metar-taf/#2',
metarCode: "EDDH 041050Z 29013KT 6000 SCT006 BKN009 04/03 Q1028 TEMPO BKN012",
expectedValues: [
['icao', 'EDDH'],
['wind', {degrees: 290, speed_kts: 13, gust_kts: 13}],
['visibility', { meters: "6000", meters_float: 6000 }],
['clouds', [{base_feet_agl: 600, code: 'SCT'}, {base_feet_agl: 900, code: 'BKN'}]],
['temperature', { celsius: 4, fahrenheit: 39.2 }],
['barometer', {kpa: 102.8 }],
['flight_category', 'IFR']
],
output: false
},
{
source: 'https://en.allmetsat.com/metar-taf/#3',
metarCode: "ETEB 041056Z AUTO 26010KT 9999 SCT090 00/M01 A3052 RMK AO2 SLP378 T10031013",
expectedValues: [
['icao', 'ETEB'],
['wind', {degrees: 260, speed_kts: 10, gust_kts: 10}],
['visibility', { meters: "10000", meters_float: 9999 }],
['clouds', [{base_feet_agl: 9000, code: 'SCT'}]],
['temperature', { celsius: 0, fahrenheit: 32 }],
['barometer', {kpa: 3052 / 10 / 2.9529988 }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'https://aviationweather.gov/metar/#3',
metarCode: "KEYW 050653Z AUTO 19006KT FEW024 BKN039 26/23 A3000 RMK AO2 LTG DSNT W SLP159 T02610228",
expectedValues: [
['icao', 'KEYW'],
['wind', {degrees: 190, speed_kts: 6, gust_kts: 6}],
['clouds', [{base_feet_agl: 2400, code: 'FEW'}, {base_feet_agl: 3900, code: 'BKN'}]],
['temperature', { celsius: 26 }],
['barometer', {kpa: 3000 / 10 / 2.9529988 }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'https://api.checkwx.com/#2019-01-07',
metarCode: 'KSFO 070121Z 19023KT 1 1/2SM R28R/6000VP6000FT -RA BKN004 BKN013 OVC035 15/12 A2970 RMK AO2 T01500122 PNO $',
expectedValues: [
['icao', 'KSFO'],
['conditions', [ {code: '-'}, {code: 'RA'} ]],
['visibility', { meters: "2500", meters_float: convert.milesToMeters(1.5) }],
['temperature', { celsius: 15, fahrenheit: 59 }],
['dewpoint', { celsius: 12, fahrenheit: 53.6 }],
['barometer', {kpa: 2970 / 10 / 2.9529988, mb: 2970 / 2.9529988 }],
['flight_category', 'IFR']
],
output: false
},
{
source: 'EHAM with CAVOK',
metarCode: 'EHAM 100125Z 33004KT CAVOK M00/M01 Q1026 NOSIG',
expectedValues: [
['visibility', { miles_float: 10, meters: "16000", meters_float: convert.milesToMeters(10) }],
['temperature', { celsius: -0, fahrenheit: 32 }],
['dewpoint', { celsius: -1, fahrenheit: 30.2 }],
['barometer', { hg: convert.kpaToInhg(102.6), kpa: 102.6, mb: 1026 }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'Without VIS because it is CLR',
metarCode: 'KEYW 291553Z VRB03KT CLR 17/09 A3009 RMK AO2 SLP189 T01670089 $',
expectedValues: [
['visibility', { miles_float: 10, meters: "16000", meters_float: convert.milesToMeters(10) }],
['wind', {degrees: 180, speed_kts: 3, gust_kts: 3}],
['temperature', { celsius: 17 }],
['dewpoint', { celsius: 9 }],
['flight_category', 'VFR']
],
output: false
},
{
source: 'AUTO report without a wind group',
metarCode: 'KDVO 022335Z AUTO 4SM BR BKN007 BKN013 12/12 A2988 RMK AO2',
expectedValues: [
['visibility', { miles_float: 4, meters: "6500", meters_float: convert.milesToMeters(4) }]
],
output: false
}
];
metarTestcases.forEach(function(test) {
it('must convert METAR string from ' + test.source, function() {
const metarData = metarParser(test.metarCode);
if (test.output) {
console.log(metarData);
}
assert.ok(metarData);
assert.ok(metarData.raw_text);
assert.ok(metarData.raw_parts);
assert.ok(metarData.raw_parts.length > 2);
if (metarData.visibility) {
assert.ok(metarData.visibility.miles);
assert.ok(metarData.visibility.meters);
}
if (metarData.clouds && metarData.clouds[0]) {
assert.ok(metarData.clouds[0].code);
assert.ok(metarData.clouds[0].base_feet_agl);
assert.ok(metarData.clouds[0].base_meters_agl);
}
if (metarData.ceiling) {
assert.ok(metarData.ceiling.code);
assert.ok(metarData.ceiling.feet_agl);
assert.ok(metarData.ceiling.meters_agl);
}
test.expectedValues.forEach((valueTest) => {
assert.ok(metarData[valueTest[0]], 'Key present: ' + valueTest[0]);
// Add missing values for tests
switch (valueTest[0]) {
case 'clouds':
valueTest[1] = valueTest[1].map((cloud) => {
if (!cloud.base_meters_agl) {
cloud.base_meters_agl = convert.feetToMeters(cloud.base_feet_agl);
}
return cloud;
});
break;
}
// Actual testing
switch (valueTest[0]) {
case 'temperature':
case 'dewpoint':
assert.deepStrictEqual(metarData[valueTest[0]].celsius, valueTest[1].celsius, 'celsius match');
break;
case 'barometer':
assert.deepStrictEqual(metarData[valueTest[0]].kpa, valueTest[1].kpa, 'kpa match');
break;
case 'wind':
assert.deepStrictEqual(metarData[valueTest[0]].degrees, valueTest[1].degrees, 'degrees match');
assert.deepStrictEqual(metarData[valueTest[0]].speed_kts, valueTest[1].speed_kts, 'speed_kts match');
assert.deepStrictEqual(metarData[valueTest[0]].gust_kts, valueTest[1].gust_kts, 'gust_kts match');
if (valueTest[1].speed_mps) {
assert.deepStrictEqual(metarData[valueTest[0]].speed_mps, valueTest[1].speed_mps, 'speed_mps match');
}
if (valueTest[1].gust_mps) {
assert.deepStrictEqual(metarData[valueTest[0]].gust_mps, valueTest[1].gust_mps, 'gust_mps match');
}
break;
case 'visibility':
assert.deepStrictEqual(metarData[valueTest[0]].meters, valueTest[1].meters, 'meters match');
break;
default:
assert.deepStrictEqual(metarData[valueTest[0]], valueTest[1], 'Exact value match: ' + valueTest[0]);
break;
}
});
});
});
});
<|start_filename|>test/convert.js<|end_filename|>
'use strict';
const assert = require('assert');
const convert = require('../lib/convert');
describe('convert', function() {
it('must convert celsius in fahrenheit', function() {
const fahrenheit = convert.celsiusToFahrenheit(1);
//console.log(fahrenheit);
assert.ok(fahrenheit > 33.75);
assert.ok(fahrenheit < 33.85);
});
it('must convert kpa in hgin', function() {
const hgin = convert.kpaToInhg(1);
// console.log(hgin);
assert.ok(hgin > 0.29);
assert.ok(hgin < 0.30);
});
it('must convert hgin in kpa', function() {
const kpa = convert.inhgToKpa(1);
//console.log(kpa);
assert.ok(kpa > 3.38);
assert.ok(kpa < 3.39);
});
it('must convert miles in meters', function() {
const meters = convert.milesToMeters(1);
//console.log(meters);
assert.ok(meters > 1609);
assert.ok(meters < 1610);
});
it('must convert km/h in m/s', function() {
const mps = convert.kphToMps(1);
//console.log(mps);
assert.ok(mps > 0.277);
assert.ok(mps < 0.278);
});
it('must convert m/s to kts', function() {
const kts = convert.mpsToKts(1);
//console.log(kts);
assert.ok(kts > 1.94);
assert.ok(kts < 1.95);
});
it('must convert kts to m/s', function() {
const mps = convert.ktsToMps(1);
//console.log(mps);
assert.ok(mps > 0.51);
assert.ok(mps < 0.52);
});
});
<|start_filename|>lib/metar-parser.js<|end_filename|>
'use strict';
const convert = require('./convert');
/**
* Convert METAR string into structured object.
* @see https://api.checkwx.com/#31-single
* @see https://www.skybrary.aero/index.php/Meteorological_Terminal_Air_Report_(METAR)
* @param {String} metarString raw
* @returns {Object} with structured information. The object resembles the API
 * response of the data property of https://api.checkwx.com/#31-single
*/
const metarParser = function(metarString) {
let metarArray = metarString
.trim()
.replace(/^METAR\S*?\s/, '')
.replace(/(\s)(\d)\s(\d)\/(\d)(SM)/, function(all, a, b, c, d, e) {
// convert a visibility range like `1 1/2 SM` into a single fraction
return a + (Number(b) * Number(d) + Number(c)) + '/' + d + e;
})
.split(' ')
;
if (metarArray.length < 3) {
throw new Error('Not enough METAR information found');
}
let metarObject = {
raw_text: metarString,
raw_parts: metarArray
};
const _private = {};
/**
* @see http://andrew.rsmas.miami.edu/bmcnoldy/Humidity.html
* @param {Number} temp in celsius
* @param {Number} dew in celsius
 * @returns {Number} relative humidity as a fraction between 0 and 1
*/
_private.calcHumidity = function(temp, dew) {
return Math.exp(
(17.625 * dew) / (243.04 + dew)
) / Math.exp(
(17.625 * temp) / (243.04 + temp)
);
};
/**
* @param {Number} value dito
* @param {Number} toNext round to next full xxxx
* @returns {Number} rounded value
*/
_private.round = function(value, toNext = 500) {
return Math.round(value / toNext) * toNext;
};
// ---------------------------------------------------------------------------
let mode = 0;
metarArray.forEach((metarPart) => {
let match;
if (mode < 3 && metarPart.match(/^(\d+)(?:\/(\d+))?(SM)?$/)) {
mode = 3; // no wind reported
}
if (mode < 5 && metarPart.match(/^(FEW|SCT|BKN|OVC)(\d+)?/)) {
mode = 5; // no visibility / conditions reported
}
if (mode < 6 && metarPart.match(/^M?\d+\/M?\d+$/)) {
mode = 6; // end of clouds
}
switch (mode) {
case 0:
// ICAO Code
metarObject.icao = metarPart;
mode = 1;
break;
case 1:
// Observed Date
match = metarPart.match(/^(\d\d)(\d\d)(\d\d)Z$/);
if (match) {
metarObject.observed = new Date();
metarObject.observed.setUTCDate(Number(match[1]));
metarObject.observed.setUTCHours(Number(match[2]));
metarObject.observed.setUTCMinutes(Number(match[3]));
mode = 2;
}
break;
case 2:
// Wind
match = metarPart.match(/^(\d\d\d|VRB)P?(\d+)(?:G(\d+))?(KT|MPS|KPH)/);
if (match) {
match[2] = Number(match[2]);
match[3] = match[3] ? Number(match[3]) : match[2];
if (match[4] === 'KPH') {
match[2] = convert.kphToMps(match[2]);
match[3] = convert.kphToMps(match[3]);
match[4] = 'MPS';
}
metarObject.wind = {
degrees: (match[1] === 'VRB') ? 180 : Number(match[1]),
speed_kts: (match[4] === 'MPS') ? convert.mpsToKts(match[2]) : match[2],
speed_mps: (match[4] === 'MPS') ? match[2] : convert.ktsToMps(match[2]),
gust_kts: (match[4] === 'MPS') ? convert.mpsToKts(match[3]) : match[3],
gust_mps: (match[4] === 'MPS') ? match[3] : convert.ktsToMps(match[3])
};
if (match[1] === 'VRB') {
metarObject.wind.degrees_from = 0;
metarObject.wind.degrees_to = 359;
}
mode = 3;
}
break;
case 3:
// Visibility
match = metarPart.match(/^(\d+)(?:\/(\d+))?(SM)?$/);
if (match) {
match[1] = (match[2])
? Number(match[1]) / Number(match[2])
: Number(match[1])
;
metarObject.visibility = {
miles: (match[3] && match[3] === 'SM') ? match[1] : convert.metersToMiles(match[1]),
miles_float: (match[3] && match[3] === 'SM') ? match[1] : convert.metersToMiles(match[1]),
meters: (match[3] && match[3] === 'SM') ? convert.milesToMeters(match[1]) : match[1],
meters_float: (match[3] && match[3] === 'SM') ? convert.milesToMeters(match[1]) : match[1]
};
mode = 4;
} else if (metarPart === 'CAVOK' || metarPart === 'CLR') {
metarObject.visibility = {
miles: 10,
miles_float: 10,
meters: convert.milesToMeters(10),
meters_float: convert.milesToMeters(10)
};
mode = 5; // no clouds & conditions reported
} else if (metarObject.wind) {
// Variable wind direction
match = metarPart.match(/^(\d+)V(\d+)$/);
if (match) {
metarObject.wind.degrees_from = Number(match[1]);
metarObject.wind.degrees_to = Number(match[2]);
}
}
break;
case 4:
// Conditions
match = metarPart.match(/^(\+|-|VC|RE)?([A-Z][A-Z])([A-Z][A-Z])?([A-Z][A-Z])?$/);
if (match) {
if (!metarObject.conditions) {
metarObject.conditions = [];
}
match
.filter((m, index) => {
return (index !== 0 && m);
})
.forEach((m) => {
metarObject.conditions.push({ code: m });
})
;
// may occur multiple times
}
break;
case 5:
// Clouds
match = metarPart.match(/^(FEW|SCT|BKN|OVC)(\d+)/);
if (match) {
if (!metarObject.clouds) {
metarObject.clouds = [];
}
match[2] = Number(match[2]) * 100;
let cloud = {
code: match[1],
base_feet_agl: match[2],
base_meters_agl: convert.feetToMeters(match[2])
};
metarObject.clouds.push(cloud);
}
break;
case 6:
// Temperature
match = metarPart.match(/^(M?\d+)\/(M?\d+)$/);
if (match) {
match[1] = Number(match[1].replace('M', '-'));
match[2] = Number(match[2].replace('M', '-'));
metarObject.temperature = {
celsius: match[1],
fahrenheit: convert.celsiusToFahrenheit(match[1])
};
metarObject.dewpoint = {
celsius: match[2],
fahrenheit: convert.celsiusToFahrenheit(match[2])
};
metarObject.humidity_percent = _private.calcHumidity(match[1], match[2]) * 100;
mode = 7;
}
break;
case 7:
// Pressure
match = metarPart.match(/^(Q|A)(\d+)/);
if (match) {
match[2] = Number(match[2]);
match[2] /= (match[1] === 'Q') ? 10 : 100;
metarObject.barometer = {
hg: (match[1] === 'Q') ? convert.kpaToInhg(match[2]) : match[2],
kpa: (match[1] === 'Q') ? match[2] : convert.inhgToKpa(match[2]),
mb: (match[1] === 'Q') ? match[2] * 10 : convert.inhgToKpa(match[2] * 10)
};
mode = 8;
}
break;
}
});
if (!metarObject.visibility) {
metarObject.visibility = {
miles: 10,
miles_float: 10,
meters: convert.milesToMeters(10),
meters_float: convert.milesToMeters(10)
};
}
// Finishing touches
metarObject.visibility.miles = String(_private.round(metarObject.visibility.miles, 0.5));
metarObject.visibility.meters = String(_private.round(metarObject.visibility.meters));
if (metarObject.clouds) {
const highestCloud = metarObject.clouds[metarObject.clouds.length - 1];
metarObject.ceiling = {
code: highestCloud.code,
feet_agl: highestCloud.base_feet_agl,
meters_agl: highestCloud.base_meters_agl
};
}
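// FAA-style flight categories derived from visibility and ceiling below:
// VFR:  vis > 5 mi and ceiling > 3000 ft (or no ceiling)
// MVFR: vis >= 3 mi and ceiling >= 1000 ft
// IFR:  vis >= 1 mi and ceiling >= 500 ft
// LIFR: anything lower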
metarObject.flight_category = '';
if (metarObject.visibility.miles_float > 5
&& (!metarObject.ceiling || metarObject.ceiling.feet_agl > 3000)
) {
metarObject.flight_category = 'VFR';
} else if (metarObject.visibility.miles_float >= 3
&& (!metarObject.ceiling || metarObject.ceiling.feet_agl >= 1000)
) {
metarObject.flight_category = 'MVFR';
} else if (metarObject.visibility.miles_float >= 1
&& (!metarObject.ceiling || metarObject.ceiling.feet_agl >= 500)
) {
metarObject.flight_category = 'IFR';
} else {
metarObject.flight_category = 'LIFR';
}
return metarObject;
};
module.exports = metarParser;
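// Hedged usage sketch (illustration only, not part of the module):
//
//   const metarParser = require('./metar-parser');
//   const report = metarParser('KSFO 070121Z 19023KT 10SM FEW020 15/12 A2992');
//   console.log(report.icao, report.flight_category, report.temperature.celsius);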
| fboes/metar-parser |
<|start_filename|>fourq.go<|end_filename|>
// Package fourq implements FourQ, a high-speed elliptic curve at the 128-bit
// security level.
//
// https://eprint.iacr.org/2015/565.pdf
package fourq
func multByCofactor(pt *point) {
temp := (&point{}).Set(pt)
feMul(&temp.t, &temp.t, d)
pDbl(pt)
pMixedAdd(pt, temp)
pDbl(pt)
pDbl(pt)
pDbl(pt)
pDbl(pt)
pMixedAdd(pt, temp)
pDbl(pt)
pDbl(pt)
pDbl(pt)
}
func scalarBaseMult(k []byte) *point {
if len(k) > 32 {
return nil
}
K := make([]byte, 32)
copy(K[32-len(k):], k)
sum := newPoint()
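	// Fixed-base comb: view the 256-bit scalar as eight 32-bit blocks and, on
	// each iteration, take one bit from every block to form an 8-bit index
	// into the precomputed table of generator multiples (generatorBase).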
for i := 0; i < 4; i++ {
for bit := uint(0); bit < 8; bit++ {
var idx byte
for block := 0; block < 8; block++ {
idx = 2*idx + ((K[4*block+i] >> (7 - bit)) & 1)
}
pDbl(sum)
if idx != 0 {
pMixedAdd(sum, generatorBase[idx])
}
}
}
return sum
}
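// scalarMult computes k*pt with a plain left-to-right double-and-add. pt.t is
// pre-multiplied by the curve constant d so pMixedAdd can consume it directly.
// With clearCofactor set, pt is first multiplied by the cofactor 392 to map it
// into the prime-order subgroup.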
func scalarMult(pt *point, k []byte, clearCofactor bool) *point {
if clearCofactor {
multByCofactor(pt)
pt.MakeAffine()
}
feMul(&pt.t, &pt.t, d)
sum := newPoint()
for _, b := range k {
for bit := 0; bit < 8; bit++ {
pDbl(sum)
if b&0x80 == 0x80 {
pMixedAdd(sum, pt)
}
b <<= 1
}
}
return sum
}
// IsOnCurve returns true if pt represents a compressed point on the curve
// (including the identity point and points in a non-prime order subgroup) and
// false otherwise.
func IsOnCurve(pt [32]byte) bool {
_, ok := newPoint().SetBytes(pt)
return ok
}
// IsOnCurveU returns true if pt represents an uncompressed point on the curve.
func IsOnCurveU(pt [64]byte) bool {
_, ok := newPoint().SetBytesU(pt)
return ok
}
// ScalarBaseMult returns the generator multiplied by scalar k, compressed. k
// should be 32 bytes long or shorter; otherwise the function returns a zero
// point and false.
func ScalarBaseMult(k []byte) ([32]byte, bool) {
pt := scalarBaseMult(k)
if pt == nil {
return [32]byte{}, false
}
return pt.Bytes(), true
}
// ScalarBaseMultU returns the generator multiplied by scalar k, uncompressed.
func ScalarBaseMultU(k []byte) ([64]byte, bool) {
pt := scalarBaseMult(k)
if pt == nil {
return [64]byte{}, false
}
return pt.BytesU(), true
}
// ScalarMult returns the compressed point multiplied by scalar k. The function
// returns false if pt does not represent a point on the curve, or if the output
// is the identity point. When clearCofactor=true, it additionally returns false
// when pt is not in the prime-order subgroup.
func ScalarMult(pt [32]byte, k []byte, clearCofactor bool) ([32]byte, bool) {
in, ok := (&point{}).SetBytes(pt)
if !ok {
return [32]byte{}, false
}
out := scalarMult(in, k, clearCofactor).Bytes()
return out, out != [32]byte{1}
}
// ScalarMultU returns the uncompressed point multiplied by scalar k.
func ScalarMultU(pt [64]byte, k []byte, clearCofactor bool) ([64]byte, bool) {
in, ok := (&point{}).SetBytesU(pt)
if !ok {
return [64]byte{}, false
}
out := scalarMult(in, k, clearCofactor).BytesU()
return out, out != uncompressedIdentity
}
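// Example (hedged sketch, not part of the package API): a Diffie-Hellman
// style exchange built on the exported functions above. Real code must draw
// the secret scalars from crypto/rand and check every ok result.
//
//	skA, skB := make([]byte, 32), make([]byte, 32)
//	rand.Read(skA) // crypto/rand
//	rand.Read(skB)
//	pkA, _ := ScalarBaseMult(skA)
//	pkB, _ := ScalarBaseMult(skB)
//	// Each side multiplies the peer's public point by its own secret;
//	// clearCofactor=true rejects input outside the prime-order subgroup.
//	sharedA, _ := ScalarMult(pkB, skA, true)
//	sharedB, _ := ScalarMult(pkA, skB, true)
//	// sharedA == sharedB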
<|start_filename|>field.go<|end_filename|>
package fourq
import (
"fmt"
)
// fieldElem implements a field of size p² as a quadratic extension of the base
// field where i²=-1.
type fieldElem struct {
x, y baseFieldElem // value is x+yi.
}
func newFieldElem() *fieldElem {
return &fieldElem{}
}
func (e *fieldElem) String() string {
return fmt.Sprintf("[%v, %v]", &e.x, &e.y)
}
func (e *fieldElem) GoString() string {
return fmt.Sprintf("fieldElem{x: %#v, y: %#v}", &e.x, &e.y)
}
func (e *fieldElem) Set(a *fieldElem) *fieldElem {
e.x.Set(&a.x)
e.y.Set(&a.y)
return e
}
func (e *fieldElem) SetOne() {
e.x.SetOne()
e.y.SetZero()
}
func (e *fieldElem) IsZero() bool {
return e.x.IsZero() && e.y.IsZero()
}
func (e *fieldElem) Neg(a *fieldElem) *fieldElem {
e.x.Neg(&a.x)
e.y.Neg(&a.y)
return e
}
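// Invert computes e = a^(-1) via the conjugate: 1/(x+yi) = (x-yi)/(x²+y²),
// so a single base-field inversion of the norm x²+y² suffices.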
func (e *fieldElem) Invert(a *fieldElem) *fieldElem {
t1 := newBaseFieldElem()
t2 := newBaseFieldElem()
bfeSquare(t1, &a.x)
bfeSquare(t2, &a.y)
bfeAdd(t1, t1, t2)
t1.Invert(t1)
bfeMul(&e.x, &a.x, t1)
t1.Neg(t1)
bfeMul(&e.y, &a.y, t1)
return e
}
func (e *fieldElem) reduce() {
e.x.reduce()
e.y.reduce()
}
// sign returns the "sign" of e -- either 0 or 1, used to distinguish e from -e.
func (e *fieldElem) sign() uint64 {
if e.x.IsZero() {
return e.y[1] >> 62
}
return e.x[1] >> 62
}
//go:noescape
func feDbl(c, a *fieldElem)
//go:noescape
func feAdd(c, a, b *fieldElem)
//go:noescape
func feSub(c, a, b *fieldElem)
//go:noescape
func feMul(c, a, b *fieldElem)
//go:noescape
func feSquare(c, a *fieldElem)
<|start_filename|>point.go<|end_filename|>
package fourq
import (
"fmt"
)
type point struct {
x, y, z, t fieldElem
}
func newPoint() *point {
pt := &point{}
pt.y.x[0] = 1
pt.z.x[0] = 1
return pt
}
func (c *point) String() string {
return fmt.Sprintf("point(\n\tx: %v,\n\ty: %v,\n\tz: %v,\n\tt: %v\n)", &c.x, &c.y, &c.z, &c.t)
}
func (c *point) GoString() string {
return fmt.Sprintf("&point{\n\tx: %#v,\n\ty: %#v,\n\tz: %#v,\n\tt: %#v,\n}", &c.x, &c.y, &c.z, &c.t)
}
func (c *point) Set(a *point) *point {
c.x.Set(&a.x)
c.y.Set(&a.y)
c.t.Set(&a.t)
c.z.Set(&a.z)
return c
}
// SetBytes decompresses the point pt and stores it in c. It returns c and
// true if decompression succeeded; false if not.
func (c *point) SetBytes(pt [32]byte) (*point, bool) {
c.y.x.SetBytes(pt[:16])
c.y.y.SetBytes(pt[16:])
c.z.SetOne()
// Separate p.y from the sign of x.
var s uint64
s, c.y.y[1] = uint64(c.y.y[1])>>63, c.y.y[1]&aMask
if c.y.x[1]>>63 == 1 {
return nil, false
}
// Recover the x coordinate from y, up to sign (fixed exactly below).
u, v := newFieldElem(), newFieldElem()
feSquare(u, &c.y)
feSub(u, u, one)
feSquare(v, &c.y)
feMul(v, v, d)
feAdd(v, v, one)
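// By the curve equation -x² + y² = 1 + d·x²·y², we have x² = u/v with
// u = y²-1 and v = d·y²+1; its square root is computed below via norms.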
t0, temp := newBaseFieldElem(), newBaseFieldElem()
bfeMul(t0, &u.x, &v.x)
bfeMul(temp, &u.y, &v.y)
bfeAdd(t0, t0, temp)
t1 := newBaseFieldElem()
bfeMul(t1, &u.y, &v.x)
bfeMul(temp, &u.x, &v.y)
bfeSub(t1, temp, t1)
t2 := newBaseFieldElem()
bfeSquare(t2, &v.x)
bfeSquare(temp, &v.y)
bfeAdd(t2, t2, temp)
t3 := newBaseFieldElem()
bfeSquare(t3, t0)
bfeSquare(temp, t1)
bfeAdd(t3, t3, temp)
for i := 0; i < 125; i++ {
bfeSquare(t3, t3)
}
t := newBaseFieldElem()
bfeAdd(t, t0, t3)
t.reduce()
if t.IsZero() {
bfeSub(t, t0, t3)
}
bfeDbl(t, t)
a := newBaseFieldElem()
bfeSquare(a, t2)
bfeMul(a, a, t2)
bfeMul(a, a, t)
a.chain1251(a)
b := newBaseFieldElem()
bfeMul(b, a, t2)
bfeMul(b, b, t)
bfeHalf(&c.x.x, b)
bfeMul(&c.x.y, a, t2)
bfeMul(&c.x.y, &c.x.y, t1)
// Recover x-coordinate exactly.
bfeSquare(temp, b)
bfeMul(temp, temp, t2)
if *temp != *t {
c.x.x, c.x.y = c.x.y, c.x.x
}
if c.x.sign() != s {
c.x.Neg(&c.x)
}
if !c.IsOnCurve() {
c.x.y.Neg(&c.x.y)
}
// Finally, verify point is valid and return.
if !c.IsOnCurve() {
return nil, false
}
feMul(&c.t, &c.x, &c.y)
return c, true
}
// SetBytesU returns c and true if pt represents an uncompressed point on the
// curve; false if not.
func (c *point) SetBytesU(pt [64]byte) (*point, bool) {
c.x.x.SetBytes(pt[:16])
c.x.y.SetBytes(pt[16:32])
c.y.x.SetBytes(pt[32:48])
c.y.y.SetBytes(pt[48:])
if !c.IsOnCurve() {
return nil, false
}
c.z.SetOne()
feMul(&c.t, &c.x, &c.y)
return c, true
}
// Bytes returns c, compressed into a [32]byte.
func (c *point) Bytes() (out [32]byte) {
c.MakeAffine()
c.y.y[1] += c.x.sign() << 63
x, y := c.y.x.Bytes(), c.y.y.Bytes()
copy(out[:16], x[:])
copy(out[16:], y[:])
return
}
// BytesU returns c, as an uncompressed [64]byte.
func (c *point) BytesU() (out [64]byte) {
c.MakeAffine()
xx, xy := c.x.x.Bytes(), c.x.y.Bytes()
yx, yy := c.y.x.Bytes(), c.y.y.Bytes()
copy(out[0:16], xx[:])
copy(out[16:32], xy[:])
copy(out[32:48], yx[:])
copy(out[48:], yy[:])
return
}
func (c *point) IsOnCurve() bool {
x2, y2 := newFieldElem(), newFieldElem()
feSquare(x2, &c.x)
feSquare(y2, &c.y)
lhs := newFieldElem()
feSub(lhs, y2, x2)
rhs := newFieldElem()
feMul(rhs, &c.x, &c.y)
feSquare(rhs, rhs)
feMul(rhs, rhs, d)
feAdd(rhs, rhs, one)
feSub(lhs, lhs, rhs)
lhs.reduce()
return lhs.IsZero()
}
func (c *point) MakeAffine() {
zInv := newFieldElem().Invert(&c.z)
feMul(&c.x, &c.x, zInv)
feMul(&c.y, &c.y, zInv)
c.z.SetOne()
feMul(&c.t, &c.x, &c.y)
c.x.reduce()
c.y.reduce()
c.t.reduce()
}
//go:noescape
func pDbl(a *point)
//go:noescape
func pMixedAdd(a, b *point)
<|start_filename|>fourq_test.go<|end_filename|>
package fourq
import (
"crypto/elliptic"
"crypto/rand"
"fmt"
"math/big"
"testing"
"golang.org/x/crypto/curve25519"
)
func TestIsOnCurve(t *testing.T) {
if !IsOnCurve(G) {
t.Fatal("Generator is not on curve.")
}
pt2, ok := ScalarMult(G, Order.Bytes(), false)
if ok {
t.Fatal("Returned ok on identity point.")
} else if !IsOnCurve(pt2) {
t.Fatal("Identity point is not on curve.")
}
k := make([]byte, 32)
rand.Read(k)
pt3, ok := ScalarMult(G, k, false)
if !ok {
t.Fatal("not ok")
} else if !IsOnCurve(pt3) {
t.Fatal("Random multiple of generator is not on curve.")
}
pt4 := [32]byte{}
pt4[0], pt4[16] = 5, 7
if IsOnCurve(pt4) {
t.Fatal("Non-existent point is on curve.")
}
}
func TestIsOnCurveU(t *testing.T) {
if !IsOnCurveU(GU) {
t.Fatal("Generator is not on curve.")
}
pt2, ok := ScalarMultU(GU, Order.Bytes(), false)
if ok {
t.Fatal("Returned ok on identity point.")
} else if !IsOnCurveU(pt2) {
t.Fatal("Identity point is not on curve.")
}
k := make([]byte, 32)
rand.Read(k)
pt3, ok := ScalarMultU(GU, k, false)
if !ok {
t.Fatal("not ok")
} else if !IsOnCurveU(pt3) {
t.Fatal("Random multiple of generator is not on curve.")
}
pt4 := [64]byte{}
pt4[0], pt4[32] = 5, 7
if IsOnCurveU(pt4) {
t.Fatal("Non-existent point is on curve.")
}
}
func TestScalarBaseMult(t *testing.T) {
pt3, ok := ScalarBaseMult(Order.Bytes())
if !ok {
t.Fatal("not ok")
} else if pt3 != [32]byte{1} {
t.Fatal("ScalarBaseMult(Order) was not identity.")
}
k := make([]byte, 32)
rand.Read(k)
pt4, ok := ScalarMult(G, k, false)
if !ok {
t.Fatal("not ok")
}
pt5, ok := ScalarBaseMult(k)
if !ok {
t.Fatal("not ok")
} else if pt4 != pt5 {
t.Fatal("ScalarMult(G, k) != ScalarBaseMult(k)")
}
}
func TestScalarBaseMultU(t *testing.T) {
pt3, ok := ScalarBaseMultU(Order.Bytes())
if !ok {
t.Fatal("not ok")
} else if pt3 != uncompressedIdentity {
t.Fatal("ScalarBaseMultU(Order) was not identity.")
}
k := make([]byte, 32)
rand.Read(k)
pt4, ok := ScalarMultU(GU, k, false)
if !ok {
t.Fatal("not ok")
}
pt5, ok := ScalarBaseMultU(k)
if !ok {
t.Fatal("not ok")
} else if pt4 != pt5 {
t.Fatal("ScalarMultU(GU, k) != ScalarBaseMultU(k)")
}
}
func TestScalarMult(t *testing.T) {
// Source: https://github.com/bifurcation/fourq/blob/master/impl/curve4q.py#L549
scalar := [4]uint64{0x3AD457AB55456230, 0x3A8B3C2C6FD86E0C, 0x7E38F7C9CFBB9166, 0x0028FD6CBDA458F0}
pt := G
var ok bool
for i := 0; i < 1000; i++ {
scalar[1] = scalar[2]
scalar[2] += scalar[0]
scalar[2] &= 0xffffffffffffffff
k := new(big.Int).SetUint64(scalar[3])
k.Lsh(k, 64)
k.Add(k, new(big.Int).SetUint64(scalar[2]))
k.Lsh(k, 64)
k.Add(k, new(big.Int).SetUint64(scalar[1]))
k.Lsh(k, 64)
k.Add(k, new(big.Int).SetUint64(scalar[0]))
pt, ok = ScalarMult(pt, k.Bytes(), false)
if !ok {
t.Fatal("not ok")
}
}
expected := "44336f9967501c286c930e7c81b3010945125f9129c4e84f10e2acac8e940b57"
if fmt.Sprintf("%x", pt) != expected {
t.Fatal("Point is wrong!")
}
}
func TestScalarMultU(t *testing.T) {
// Source: https://github.com/bifurcation/fourq/blob/master/impl/curve4q.py#L549
scalar := [4]uint64{0x3AD457AB55456230, 0x3A8B3C2C6FD86E0C, 0x7E38F7C9CFBB9166, 0x0028FD6CBDA458F0}
pt := GU
var ok bool
for i := 0; i < 1000; i++ {
scalar[1] = scalar[2]
scalar[2] += scalar[0]
scalar[2] &= 0xffffffffffffffff
k := new(big.Int).SetUint64(scalar[3])
k.Lsh(k, 64)
k.Add(k, new(big.Int).SetUint64(scalar[2]))
k.Lsh(k, 64)
k.Add(k, new(big.Int).SetUint64(scalar[1]))
k.Lsh(k, 64)
k.Add(k, new(big.Int).SetUint64(scalar[0]))
pt, ok = ScalarMultU(pt, k.Bytes(), false)
if !ok {
t.Fatal("not ok")
}
}
expected := "ef4b49bd77b4d2df1b4ac9bf2b127c2559c4377254939576011fb1b50cf89b4644336f9967501c286c930e7c81b3010945125f9129c4e84f10e2acac8e940b57"
if fmt.Sprintf("%x", pt) != expected {
t.Fatal("Point is wrong!")
}
}
func TestCofactorClearing(t *testing.T) {
limit := big.NewInt(1)
limit.Lsh(limit, 200)
K1, _ := rand.Int(rand.Reader, limit)
K2 := big.NewInt(392)
K2.Mul(K1, K2)
pt1, ok := ScalarMult(G, K1.Bytes(), true)
if !ok {
t.Fatal("not ok")
}
pt2, ok := ScalarMult(G, K2.Bytes(), false)
if !ok {
t.Fatal("not ok")
}
if pt1 != pt2 {
t.Fatal("Points are not the same.")
}
}
func BenchmarkScalarBaseMult(b *testing.B) {
k := make([]byte, 32)
rand.Read(k)
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
ScalarBaseMult(k)
}
}
func BenchmarkScalarMult(b *testing.B) {
k := make([]byte, 32)
rand.Read(k)
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
ScalarMult(G, k, false)
}
}
func BenchmarkScalarMultU(b *testing.B) {
k := make([]byte, 32)
rand.Read(k)
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
ScalarMultU(GU, k, false)
}
}
func BenchmarkP256Base(b *testing.B) {
c := elliptic.P256()
k := make([]byte, 32)
rand.Read(k)
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
c.ScalarBaseMult(k)
}
}
func BenchmarkP256(b *testing.B) {
c := elliptic.P256()
params := c.Params()
k := make([]byte, 32)
rand.Read(k)
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
c.ScalarMult(params.Gx, params.Gy, k)
}
}
func BenchmarkCurve25519(b *testing.B) {
dst, in := [32]byte{}, [32]byte{}
rand.Read(in[:])
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
curve25519.ScalarBaseMult(&dst, &in)
}
}
<|start_filename|>field.h<|end_filename|>
#include "base.h"
#define feMov(a0,a1,a2,a3, c0,c1,c2,c3) \
MOVQ a0, c0 \
MOVQ a1, c1 \
MOVQ a2, c2 \
MOVQ a3, c3
#define feNeg(c0,c1,c2,c3) \
bfeNeg(c0,c1) \
bfeNeg(c2,c3)
#define feDbl(c0,c1,c2,c3) \
bfeDbl(c0,c1) \
bfeDbl(c2,c3)
#define feAdd(a0,a1,a2,a3, c0,c1,c2,c3) \
bfeAdd(a0,a1, c0,c1) \
bfeAdd(a2,a3, c2,c3)
#define feSub(a0,a1,a2,a3, c0,c1,c2,c3) \
bfeSub(a0,a1, c0,c1) \
bfeSub(a2,a3, c2,c3)
#define feMul(ra, rb, rc) \
\ // T0 = a0 * b0, R11:R10:R9:R8 <- 0+ra:8+ra * 0+rb:8+rb
MOVQ 0+rb, DX \
MULXQ 0+ra, R8, R9 \
MULXQ 8+ra, R10, AX \
ADDQ R10, R9 \
MOVQ 8+rb, DX \
MULXQ 8+ra, R10, R11 \
ADCQ AX, R10 \
MULXQ 0+ra, DX, AX \
ADCQ $0, R11 \
ADDQ DX, R9 \
\
\ // T1 = a1 * b1, R15:R14:R13:R12 <- 16+ra:24+ra * 16+rb:24+rb
MOVQ 16+rb, DX \
MULXQ 16+ra, R12, R13 \
ADCQ AX, R10 \
MULXQ 24+ra, R14, AX \
ADCQ $0, R11 \
MOVQ 24+rb, DX \
ADDQ R14, R13 \
MULXQ 24+ra, R14, R15 \
ADCQ AX, R14 \
ADCQ $0, R15 \
MULXQ 16+ra, DX, AX \
ADDQ DX, R13 \
ADCQ AX, R14 \
ADCQ $0, R15 \
\
\ // c0 = T0 - T1 = a0*b0 - a1*b1
SUBQ R12, R8 \
SBBQ R13, R9 \
SBBQ R14, R10 \
SBBQ R15, R11 \
\
SHLQ $1, R10, R11 \
SHLQ $1, R9, R10 \
MOVQ 16+rb, DX \
BTRQ $63, R9 \
\
\ // T0 = a0 * b1, R15:R14:R13:R12 <- 0+ra:8+ra * 16+rb:24+rb
MULXQ 0+ra, R12, R13 \
BTRQ $63, R11 \
SBBQ $0, R10 \
SBBQ $0, R11 \
MULXQ 8+ra, R14, AX \
ADDQ R14, R13 \
MOVQ 24+rb, DX \
MULXQ 8+ra, R14, R15 \
ADCQ AX, R14 \
ADCQ $0, R15 \
MULXQ 0+ra, DX, AX \
ADDQ DX, R13 \
ADCQ AX, R14 \
ADCQ $0, R15 \
\
\ // Reducing and storing c0
ADDQ R8, R10 \
ADCQ R9, R11 \
BTRQ $63, R11 \
ADCQ $0, R10 \
ADCQ $0, R11 \
\
\ // T1 = a1 * b0, R12:R11:R10:R9 <- 16+ra:24+ra * 0+rb:8+rb
MOVQ 0+rb, DX \
MULXQ 16+ra, R8, R9 \
MOVQ R10, 0+rc \
MULXQ 24+ra, R10, AX \
MOVQ R11, 8+rc \
ADDQ R10, R9 \
MOVQ 8+rb, DX \
MULXQ 24+ra, R10, R11 \
ADCQ AX, R10 \
ADCQ $0, R11 \
MULXQ 16+ra, DX, AX \
ADDQ DX, R9 \
ADCQ AX, R10 \
ADCQ $0, R11 \
\
\ // c1 = T0 + T1 = a0*b1 + a1*b0
ADDQ R12, R8 \
ADCQ R13, R9 \
ADCQ R14, R10 \
ADCQ R15, R11 \
\
\ // Reducing and storing c1
SHLQ $1, R10, R11 \
SHLQ $1, R9, R10 \
BTRQ $63, R9 \
BTRQ $63, R11 \
ADCQ R10, R8 \
ADCQ R11, R9 \
BTRQ $63, R9 \
ADCQ $0, R8 \
ADCQ $0, R9 \
MOVQ R8, 16+rc \
MOVQ R9, 24+rc
#define feSquare(ra, rc) \
\ // t1 = R11:R10 = a0 - a1, CX:R14 = a1
MOVQ 0+ra, R10 \
MOVQ 16+ra, R14 \
SUBQ R14, R10 \
MOVQ 8+ra, R11 \
MOVQ 24+ra, CX \
SBBQ CX, R11 \
\
BTRQ $63, R11 \
SBBQ $0, R10 \
\
\ // t0 = R9:R8 = a0 + a1
MOVQ R10, DX \
MOVQ 0+ra, R8 \
ADDQ R14, R8 \
MOVQ 8+ra, R9 \
ADCQ CX, R9 \
\
\ // c0 = t0 * t1 = (a0 + a1)*(a0 - a1), CX:R14:R13:R12 <- R9:R8 * R11:R10
MULXQ R8, R12, R13 \
SBBQ $0, R11 \
MULXQ R9, R14, AX \
MOVQ R11, DX \
ADDQ R14, R13 \
MULXQ R9, R14, CX \
MOVQ 8+ra, R9 \
ADCQ AX, R14 \
ADCQ $0, CX \
MULXQ R8, DX, AX \
MOVQ 0+ra, R8 \
ADDQ DX, R13 \
ADCQ AX, R14 \
ADCQ $0, CX \
\
\ // t2 = R9:R8 = 2*a0
ADDQ R8, R8 \
ADCQ R9, R9 \
\
\ // Reducing and storing c0
SHLQ $1, R14, CX \
SHLQ $1, R13, R14 \
BTRQ $63, R13 \
BTRQ $63, CX \
ADCQ R14, R12 \
ADCQ CX, R13 \
BTRQ $63, R13 \
ADCQ $0, R12 \
ADCQ $0, R13 \
MOVQ R12, 0+rc \
MOVQ R13, 8+rc \
\
\ // c1 = 2a0 * a1, CX:R14:R11:R10 <- R9:R8 * 16+ra:24+ra
MOVQ 16+ra, DX \
MULXQ R8, R10, R11 \
MULXQ R9, R14, AX \
ADDQ R14, R11 \
MOVQ 24+ra, DX \
MULXQ R9, R14, CX \
ADCQ AX, R14 \
ADCQ $0, CX \
MULXQ R8, DX, AX \
ADDQ DX, R11 \
ADCQ AX, R14 \
ADCQ $0, CX \
\
\ // Reduce and store c1
SHLQ $1, R14, CX \
SHLQ $1, R11, R14 \
BTRQ $63, R11 \
BTRQ $63, CX \
ADCQ R14, R10 \
ADCQ CX, R11 \
BTRQ $63, R11 \
ADCQ $0, R10 \
ADCQ $0, R11 \
MOVQ R10, 16+rc \
MOVQ R11, 24+rc
<|start_filename|>constants.go<|end_filename|>
package fourq
import (
"math/big"
)
var (
Order, _ = new(big.Int).SetString("73846995687063900142583536357581573884798075859800097461294096333596429543", 10)
aMask uint64 = 0x7fffffffffffffff
bMask uint64 = 0xffffffffffffffff
g = &point{
x: fieldElem{
x: baseFieldElem{0x286592ad7b3833aa, 0x1a3472237c2fb305},
y: baseFieldElem{0x96869fb360ac77f6, 0x1e1f553f2878aa9c},
},
y: fieldElem{
x: baseFieldElem{0xb924a2462bcbb287, 0x0e3fee9ba120785a},
y: baseFieldElem{0x49a7c344844c8b5c, 0x6e1c4af8630e0242},
},
t: fieldElem{
x: baseFieldElem{0x894ba36ee8cee416, 0x35bfa1947fb0913e},
y: baseFieldElem{0x673c574d296cd8d0, 0x7bfb41a38e7076ac},
},
z: fieldElem{
x: baseFieldElem{0x0000000000000001, 0x0000000000000000},
y: baseFieldElem{0x0000000000000000, 0x0000000000000000},
},
}
// G is the compressed generator point.
G = [32]byte{
0x87, 0xb2, 0xcb, 0x2b, 0x46, 0xa2, 0x24, 0xb9,
0x5a, 0x78, 0x20, 0xa1, 0x9b, 0xee, 0x3f, 0x0e,
0x5c, 0x8b, 0x4c, 0x84, 0x44, 0xc3, 0xa7, 0x49,
0x42, 0x02, 0x0e, 0x63, 0xf8, 0x4a, 0x1c, 0x6e,
}
// GU is the uncompressed generator point.
GU = [64]byte{
0xaa, 0x33, 0x38, 0x7b, 0xad, 0x92, 0x65, 0x28,
0x05, 0xb3, 0x2f, 0x7c, 0x23, 0x72, 0x34, 0x1a,
0xf6, 0x77, 0xac, 0x60, 0xb3, 0x9f, 0x86, 0x96,
0x9c, 0xaa, 0x78, 0x28, 0x3f, 0x55, 0x1f, 0x1e,
0x87, 0xb2, 0xcb, 0x2b, 0x46, 0xa2, 0x24, 0xb9,
0x5a, 0x78, 0x20, 0xa1, 0x9b, 0xee, 0x3f, 0x0e,
0x5c, 0x8b, 0x4c, 0x84, 0x44, 0xc3, 0xa7, 0x49,
0x42, 0x02, 0x0e, 0x63, 0xf8, 0x4a, 0x1c, 0x6e,
}
uncompressedIdentity = [64]byte{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1,
}
d = &fieldElem{
x: baseFieldElem{0x142, 0xe4},
y: baseFieldElem{0xb3821488f1fc0c8d, 0x5e472f846657e0fc},
}
one = &fieldElem{x: baseFieldElem{0x1, 0x0}}
// generatorBase contains pre-computed multiples of the curve's generator,
// to speed up scalarBaseMult.
generatorBase = [256]*point{}
)
func init() {
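// For each comb index, build the scalar whose set bits select the matching
// 32-bit-spaced teeth, then precompute that multiple of g with t already
// multiplied by d, as pMixedAdd expects.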
for idx := 1; idx < 256; idx++ {
s := make([]byte, 32)
for bit := uint(0); bit < 8; bit++ {
if (idx>>bit)&1 == 1 {
s[31-4*bit] = 1
}
}
pt := scalarMult((&point{}).Set(g), s, false)
pt.MakeAffine()
feMul(&pt.t, &pt.t, d)
generatorBase[idx] = pt
}
}
<|start_filename|>base.h<|end_filename|>
#define bfeMov(a0,a1, c0,c1) \
MOVQ a0, c0 \
MOVQ a1, c1
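// bfeReduce reduces c0:c1 modulo p = 2^127-1 by clearing bit 127 and adding
// the carry back into the low end.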
#define bfeReduce(c0,c1) \
BTRQ $63, c1 \
ADCQ $0, c0 \
ADCQ $0, c1
#define bfeNeg(c0,c1) \
NOTQ c0 \
NOTQ c1 \
BTRQ $63, c1
#define bfeDbl(c0,c1) \
SHLQ $1, c1:c0 \
SHLQ $1, c0:c1 \
BTRQ $63, c1
// bfeAdd adds a0:a1 to c0:c1.
#define bfeAdd(a0,a1, c0,c1) \
ADDQ a0, c0 \
ADCQ a1, c1 \
bfeReduce(c0,c1)
// bfeSub stores a0:a1 - c0:c1 in c0:c1.
#define bfeSub(a0,a1, c0,c1) \
bfeNeg(c0,c1) \
bfeAdd(a0,a1, c0,c1)
<|start_filename|>base.go<|end_filename|>
package fourq
import (
"fmt"
)
// baseFieldElem is an element of the curve's base field, the integers modulo
// p=2^127-1. baseFieldElem is always in reduced form.
type baseFieldElem [2]uint64
func newBaseFieldElem() *baseFieldElem {
return &baseFieldElem{}
}
func (e *baseFieldElem) String() string { return fmt.Sprintf("%16.16x %16.16x", e[1], e[0]) }
func (e *baseFieldElem) GoString() string {
return fmt.Sprintf("baseFieldElem{0x%16.16x, 0x%16.16x}", e[0], e[1])
}
func (e *baseFieldElem) Bytes() [16]byte {
return [16]byte{
byte(e[0]), byte(e[0] >> 8), byte(e[0] >> 16), byte(e[0] >> 24),
byte(e[0] >> 32), byte(e[0] >> 40), byte(e[0] >> 48), byte(e[0] >> 56),
byte(e[1]), byte(e[1] >> 8), byte(e[1] >> 16), byte(e[1] >> 24),
byte(e[1] >> 32), byte(e[1] >> 40), byte(e[1] >> 48), byte(e[1] >> 56),
}
}
func (e *baseFieldElem) Set(a *baseFieldElem) { e[0], e[1] = a[0], a[1] }
func (e *baseFieldElem) SetZero() { e[0], e[1] = 0, 0 }
func (e *baseFieldElem) SetOne() { e[0], e[1] = 1, 0 }
func (e *baseFieldElem) SetBytes(in []byte) {
e[0] = uint64(in[0]) | uint64(in[1])<<8 | uint64(in[2])<<16 |
uint64(in[3])<<24 | uint64(in[4])<<32 | uint64(in[5])<<40 |
uint64(in[6])<<48 | uint64(in[7])<<56
e[1] = uint64(in[8]) | uint64(in[9])<<8 | uint64(in[10])<<16 |
uint64(in[11])<<24 | uint64(in[12])<<32 | uint64(in[13])<<40 |
uint64(in[14])<<48 | uint64(in[15])<<56
}
func (e *baseFieldElem) IsZero() bool { return e[0] == 0 && e[1] == 0 }
func (e *baseFieldElem) Neg(a *baseFieldElem) *baseFieldElem {
e[0] = ^a[0]
e[1] = (^a[1]) & aMask
return e
}
// chain1251 sets e to a^(2^125-1) and returns e.
func (e *baseFieldElem) chain1251(a *baseFieldElem) *baseFieldElem {
t1 := newBaseFieldElem()
t2 := newBaseFieldElem()
t3 := newBaseFieldElem()
t4 := newBaseFieldElem()
t5 := newBaseFieldElem()
bfeSquare(t2, a)
bfeMul(t2, t2, a)
bfeSquare(t3, t2)
bfeSquare(t3, t3)
bfeMul(t3, t3, t2)
bfeSquare(t4, t3)
bfeSquare(t4, t4)
bfeSquare(t4, t4)
bfeSquare(t4, t4)
bfeMul(t4, t4, t3)
bfeSquare(t5, t4)
for i := 0; i < 7; i++ {
bfeSquare(t5, t5)
}
bfeMul(t5, t5, t4)
bfeSquare(t2, t5)
for i := 0; i < 15; i++ {
bfeSquare(t2, t2)
}
bfeMul(t2, t2, t5)
bfeSquare(t1, t2)
for i := 0; i < 31; i++ {
bfeSquare(t1, t1)
}
bfeMul(t1, t1, t2)
for i := 0; i < 32; i++ {
bfeSquare(t1, t1)
}
bfeMul(t1, t2, t1)
for i := 0; i < 16; i++ {
bfeSquare(t1, t1)
}
bfeMul(t1, t1, t5)
for i := 0; i < 8; i++ {
bfeSquare(t1, t1)
}
bfeMul(t1, t1, t4)
for i := 0; i < 4; i++ {
bfeSquare(t1, t1)
}
bfeMul(t1, t1, t3)
bfeSquare(t1, t1)
bfeMul(e, t1, a)
return e
}
// Invert sets e to a^(-1) and returns e.
func (e *baseFieldElem) Invert(a *baseFieldElem) *baseFieldElem {
t := newBaseFieldElem().chain1251(a)
bfeSquare(t, t)
bfeSquare(t, t)
bfeMul(e, t, a)
return e
}
// reduce sets e to zero if it is equal to p. This is the only case where e will
// not naturally be reduced to canonical form.
func (e *baseFieldElem) reduce() {
if e[0] == bMask && e[1] == aMask {
e[0], e[1] = 0, 0
}
}
//go:noescape
func bfeDbl(c, a *baseFieldElem)
//go:noescape
func bfeHalf(c, a *baseFieldElem)
//go:noescape
func bfeAdd(c, a, b *baseFieldElem)
//go:noescape
func bfeSub(c, a, b *baseFieldElem)
//go:noescape
func bfeMul(c, a, b *baseFieldElem)
//go:noescape
func bfeSquare(c, a *baseFieldElem)
| yifengyou/fourq |
<|start_filename|>Source/StomtPlugin/Public/StomtJsonObject.h<|end_filename|>
// Copyright 2018 STOMT GmbH. All Rights Reserved.
#pragma once
#include "StomtJsonValue.h"
#include "StomtJsonObject.generated.h"
/**
* Blueprintable FJsonObject wrapper
*/
UCLASS()
class UStomtRestJsonObject : public UObject
{
GENERATED_BODY()
public:
UStomtRestJsonObject();
/** Create new Json object */
UFUNCTION(BlueprintPure, meta = (DisplayName = "Construct Json Object", HidePin = "WorldContextObject", DefaultToSelf = "WorldContextObject"), Category = "Stomt|Json")
static UStomtRestJsonObject* ConstructJsonObject(UObject* WorldContextObject);
/** Reset all internal data */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void Reset();
/** Get the root Json object */
TSharedPtr<FJsonObject>& GetRootObject();
/** Set the root Json object */
void SetRootObject(TSharedPtr<FJsonObject>& JsonObject);
// //////////////////////////////////////////////////////////////////////////
// // Serialization
/** Serialize Json to string (formatted with line breaks) */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
FString EncodeJson() const;
/** Serialize Json to string (single string without line breaks) */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
FString EncodeJsonToSingleString() const;
/** Construct Json object from string */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
bool DecodeJson(const FString& JsonString);
// //////////////////////////////////////////////////////////////////////////
// // FJsonObject API
//
/** Returns a list of field names that exist in the object */
UFUNCTION(BlueprintPure, Category = "Stomt|Json")
TArray<FString> GetFieldNames();
/** Checks to see if the FieldName exists in the object */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
bool HasField(const FString& FieldName) const;
/** Remove field named FieldName */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void RemoveField(const FString& FieldName);
/** Get the field named FieldName as a JsonValue */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
UStomtJsonValue* GetField(const FString& FieldName) const;
/** Add a field named FieldName with a Value */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetField(const FString& FieldName, UStomtJsonValue* JsonValue);
/** Get the field named FieldName as a Json Array */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
TArray<UStomtJsonValue*> GetArrayField(const FString& FieldName);
/** Set an ObjectField named FieldName and value of Json Array */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetArrayField(const FString& FieldName, const TArray<UStomtJsonValue*>& InArray);
/** Adds all of the fields from one json object to this one */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void MergeJsonObject(UStomtRestJsonObject* InJsonObject, bool Overwrite);
// //////////////////////////////////////////////////////////////////////////
// // FJsonObject API Helpers (easy to use with simple Json objects)
/** Get the field named FieldName as a number. Ensures that the field is present and is of type Json number.
* Attn.!! float used instead of double to make the function blueprintable! */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
float GetNumberField(const FString& FieldName) const;
/** Add a field named FieldName with Number as value
* Attn.!! float used instead of double to make the function blueprintable! */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetNumberField(const FString& FieldName, float Number);
/** Get the field named FieldName as a string. */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
FString GetStringField(const FString& FieldName) const;
/** Add a field named FieldName with value of StringValue */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetStringField(const FString& FieldName, const FString& StringValue);
/** Get the field named FieldName as a boolean. */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
bool GetBoolField(const FString& FieldName) const;
/** Set a boolean field named FieldName and value of InValue */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetBoolField(const FString& FieldName, bool InValue);
/** Get the field named FieldName as a Json object. */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
UStomtRestJsonObject* GetObjectField(const FString& FieldName) const;
/** Set an ObjectField named FieldName and value of JsonObject */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetObjectField(const FString& FieldName, UStomtRestJsonObject* JsonObject);
//
// //////////////////////////////////////////////////////////////////////////
// // Array fields helpers (uniform arrays)
/** Get the field named FieldName as a Number Array. Use it only if you're sure that array is uniform!
* Attn.!! float used instead of double to make the function blueprintable! */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
TArray<float> GetNumberArrayField(const FString& FieldName);
/** Set an ObjectField named FieldName and value of Number Array
* Attn.!! float used instead of double to make the function blueprintable! */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetNumberArrayField(const FString& FieldName, const TArray<float>& NumberArray);
/** Get the field named FieldName as a String Array. Use it only if you're sure that array is uniform! */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
TArray<FString> GetStringArrayField(const FString& FieldName);
/** Set an ObjectField named FieldName and value of String Array */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetStringArrayField(const FString& FieldName, const TArray<FString>& StringArray);
/** Get the field named FieldName as a Bool Array. Use it only if you're sure that array is uniform! */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
TArray<bool> GetBoolArrayField(const FString& FieldName);
/** Set an ObjectField named FieldName and value of Bool Array */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetBoolArrayField(const FString& FieldName, const TArray<bool>& BoolArray);
/** Get the field named FieldName as an Object Array. Use it only if you're sure that array is uniform! */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
TArray<UStomtRestJsonObject*> GetObjectArrayField(const FString& FieldName);
/** Set an ObjectField named FieldName and value of Object Array */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
void SetObjectArrayField(const FString& FieldName, const TArray<UStomtRestJsonObject*>& ObjectArray);
// //////////////////////////////////////////////////////////////////////////
// // Data
private:
/** Internal JSON data */
TSharedPtr<FJsonObject> JsonObj;
};
<|start_filename|>Source/StomtPlugin/Public/StomtJsonValue.h<|end_filename|>
// Copyright 2018 STOMT GmbH. All Rights Reserved.
#pragma once
#include "StomtPluginPrivatePCH.h"
#include "StomtJsonValue.generated.h"
/**
* Represents all the types a Json Value can be.
*/
UENUM(BlueprintType)
namespace StomtEnumJson
{
enum Type
{
None,
Null,
String,
Number,
Boolean,
Array,
Object,
};
}
/**
* Blueprintable FJsonValue wrapper
*/
UCLASS(BlueprintType, Blueprintable)
class UStomtJsonValue : public UObject
{
GENERATED_BODY()
public:
/** Create new Json Number value
* Attn.!! float used instead of double to make the function blueprintable! */
UFUNCTION(BlueprintPure, meta = (DisplayName = "Construct Json Number Value", HidePin = "WorldContextObject", DefaultToSelf = "WorldContextObject"), Category = "Stomt|Json")
static UStomtJsonValue* ConstructJsonValueNumber(UObject* WorldContextObject, float Number);
/** Create new Json String value */
UFUNCTION(BlueprintPure, meta = (DisplayName = "Construct Json String Value", HidePin = "WorldContextObject", DefaultToSelf = "WorldContextObject"), Category = "Stomt|Json")
static UStomtJsonValue* ConstructJsonValueString(UObject* WorldContextObject, const FString& StringValue);
/** Create new Json Bool value */
UFUNCTION(BlueprintPure, meta = (DisplayName = "Construct Json Bool Value", HidePin = "WorldContextObject", DefaultToSelf = "WorldContextObject"), Category = "Stomt|Json")
static UStomtJsonValue* ConstructJsonValueBool(UObject* WorldContextObject, bool InValue);
/** Create new Json Array value */
UFUNCTION(BlueprintPure, meta = (DisplayName = "Construct Json Array Value", HidePin = "WorldContextObject", DefaultToSelf = "WorldContextObject"), Category = "Stomt|Json")
static UStomtJsonValue* ConstructJsonValueArray(UObject* WorldContextObject, const TArray<UStomtJsonValue*>& InArray);
/** Create new Json value from FJsonValue (to be used from StomtJsonObject) */
static UStomtJsonValue* ConstructJsonValue(UObject* WorldContextObject, const TSharedPtr<FJsonValue>& InValue);
/** Get the root Json value */
TSharedPtr<FJsonValue>& GetRootValue();
/** Set the root Json value */
void SetRootValue(TSharedPtr<FJsonValue>& JsonValue);
//////////////////////////////////////////////////////////////////////////
// FJsonValue API
/** Get type of Json value (Enum) */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
StomtEnumJson::Type GetType() const;
/** Get type of Json value (String) */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
FString GetTypeString() const;
/** Returns true if this value is a 'null' */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
bool IsNull() const;
/** Returns this value as a double, throwing an error if this is not a Json Number
* Attn.!! float used instead of double to make the function blueprintable! */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
float AsNumber() const;
/** Returns this value as a string, throwing an error if this is not a Json String */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
FString AsString() const;
/** Returns this value as a boolean, throwing an error if this is not a Json Bool */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
bool AsBool() const;
/** Returns this value as an array, throwing an error if this is not a Json Array */
UFUNCTION(BlueprintCallable, Category = "Stomt|Json")
TArray<UStomtJsonValue*> AsArray() const;
//////////////////////////////////////////////////////////////////////////
// Data
private:
/** Internal JSON data */
TSharedPtr<FJsonValue> JsonVal;
//////////////////////////////////////////////////////////////////////////
// Helpers
protected:
/** Simple error logger */
void ErrorMessage(const FString& InType) const;
};
| SaraKausch/stomt-unreal-plugin |
<|start_filename|>cmd/infracost/output_test.go<|end_filename|>
package main_test
import (
"io/ioutil"
"path/filepath"
"testing"
"github.com/infracost/infracost/internal/testutil"
"github.com/stretchr/testify/require"
)
func TestOutputHelp(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--help"}, nil)
}
func TestOutputFormatHTML(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "html", "--path", "./testdata/example_out.json", "--path", "./testdata/azure_firewall_out.json"}, nil)
}
func TestOutputFormatJSON(t *testing.T) {
opts := DefaultOptions()
opts.IsJSON = true
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "json", "--path", "./testdata/example_out.json", "--path", "./testdata/azure_firewall_out.json"}, opts)
}
func TestOutputFormatGitHubComment(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "github-comment", "--path", "./testdata/example_out.json", "--path", "./testdata/terraform_v0.14_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatGitHubCommentMultipleSkipped(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "github-comment", "--path", "./testdata/example_out.json", "--path", "./testdata/terraform_v0.14_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatGitHubCommentNoChange(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "github-comment", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatGitLabComment(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "gitlab-comment", "--path", "./testdata/example_out.json", "--path", "./testdata/terraform_v0.14_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatGitLabCommentMultipleSkipped(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "gitlab-comment", "--path", "./testdata/example_out.json", "--path", "./testdata/terraform_v0.14_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatGitLabCommentNoChange(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "gitlab-comment", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatSlackMessage(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "slack-message", "--path", "./testdata/example_out.json", "--path", "./testdata/terraform_v0.14_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatSlackMessageMultipleSkipped(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "slack-message", "--path", "./testdata/example_out.json", "--path", "./testdata/terraform_v0.14_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatSlackMessageNoChange(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "slack-message", "--path", "./testdata/terraform_v0.14_nochange_breakdown.json"}, nil)
}
func TestOutputFormatTable(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--format", "table", "--path", "./testdata/example_out.json", "--path", "./testdata/azure_firewall_out.json"}, nil)
}
func TestOutputTerraformFieldsAll(t *testing.T) {
GoldenFileCommandTest(t, testutil.CalcGoldenFileTestdataDirName(), []string{"output", "--path", "./testdata/example_out.json", "--path", "./testdata/azure_firewall_out.json", "--fields", "all"}, nil)
}
func TestOutputTerraformOutFileHTML(t *testing.T) {
testdataName := testutil.CalcGoldenFileTestdataDirName()
goldenFilePath := "./testdata/" + testdataName + "/infracost_output.golden"
outputPath := filepath.Join(t.TempDir(), "infracost_output.html")
GoldenFileCommandTest(t, testdataName, []string{"output", "--path", "./testdata/example_out.json", "--format", "html", "--out-file", outputPath}, nil)
actual, err := ioutil.ReadFile(outputPath)
require.Nil(t, err)
actual = stripDynamicValues(actual)
testutil.AssertGoldenFile(t, goldenFilePath, actual)
}
func TestOutputTerraformOutFileJSON(t *testing.T) {
testdataName := testutil.CalcGoldenFileTestdataDirName()
goldenFilePath := "./testdata/" + testdataName + "/infracost_output.golden"
outputPath := filepath.Join(t.TempDir(), "infracost_output.json")
GoldenFileCommandTest(t, testdataName, []string{"output", "--path", "./testdata/example_out.json", "--format", "json", "--out-file", outputPath}, nil)
actual, err := ioutil.ReadFile(outputPath)
require.Nil(t, err)
actual = stripDynamicValues(actual)
testutil.AssertGoldenFile(t, goldenFilePath, actual)
}
func TestOutputTerraformOutFileTable(t *testing.T) {
testdataName := testutil.CalcGoldenFileTestdataDirName()
goldenFilePath := "./testdata/" + testdataName + "/infracost_output.golden"
outputPath := filepath.Join(t.TempDir(), "infracost_output.txt")
GoldenFileCommandTest(t, testdataName, []string{"output", "--path", "./testdata/example_out.json", "--out-file", outputPath}, nil)
actual, err := ioutil.ReadFile(outputPath)
require.Nil(t, err)
actual = stripDynamicValues(actual)
testutil.AssertGoldenFile(t, goldenFilePath, actual)
}
| infracost/infracost |
<|start_filename|>format/run.jl<|end_filename|>
using JuliaFormatter
function main()
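# `format` returns true when every file already matched the target style.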
perfect = format(joinpath(@__DIR__, ".."); style=YASStyle())
if perfect
@info "Linting complete - no files altered"
else
@info "Linting complete - files altered"
run(`git status`)
end
return nothing
end
main()
| 0x0f0f0f/Mixtape.jl |
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/CpuInfo.cs<|end_filename|>
using System.Collections.Generic;
using System.IO;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
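// Discovers the logical cpu topology (socket and core ids) by reading
// /sys/devices/system/cpu.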
class CpuInfo
{
class LogicalCpuInfo
{
public int Id;
public string SocketId;
public string CoreId;
}
static LogicalCpuInfo[] _cpuInfos = GetCpuInfos();
private static LogicalCpuInfo[] GetCpuInfos()
{
var sysPath = "/sys/devices/system/cpu";
var directories = Directory.GetDirectories(sysPath, "cpu*");
var cpuInfos = new List<LogicalCpuInfo>();
foreach (var directory in directories)
{
int id;
if (int.TryParse(directory.Substring(sysPath.Length + 4), out id))
{
var cpuInfo = new LogicalCpuInfo
{
Id = id,
SocketId = File.ReadAllText($"{sysPath}/cpu{id}/topology/physical_package_id").Trim(),
CoreId = File.ReadAllText($"{sysPath}/cpu{id}/topology/core_id").Trim()
};
cpuInfos.Add(cpuInfo);
}
}
return cpuInfos.ToArray();
}
public static IEnumerable<string> GetSockets()
{
for (int i = 0; i < _cpuInfos.Length; i++)
{
var socket = _cpuInfos[i].SocketId;
bool duplicate = false;
for (int j = 0; j < i; j++)
{
if (socket == _cpuInfos[j].SocketId)
{
duplicate = true;
break;
}
}
if (!duplicate)
{
yield return socket;
}
}
}
public static IEnumerable<string> GetCores(string socket)
{
for (int i = 0; i < _cpuInfos.Length; i++)
{
var cpuInfo = _cpuInfos[i];
if (cpuInfo.SocketId != socket)
{
continue;
}
var core = _cpuInfos[i].CoreId;
bool duplicate = false;
for (int j = 0; j < i; j++)
{
if (_cpuInfos[j].SocketId != socket)
{
continue;
}
if (core == _cpuInfos[j].CoreId)
{
duplicate = true;
break;
}
}
if (!duplicate)
{
yield return core;
}
}
}
public static IEnumerable<int> GetCpuIds(string socket, string core)
{
for (int i = 0; i < _cpuInfos.Length; i++)
{
var cpuInfo = _cpuInfos[i];
if (cpuInfo.SocketId != socket || cpuInfo.CoreId != core)
{
continue;
}
yield return _cpuInfos[i].Id;
}
}
public static int GetAvailableCpus()
{
return _cpuInfos.Length;
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/ErrorInterop.cs<|end_filename|>
// Copyright 2017 <NAME> <<EMAIL>>
// This software is made available under the MIT License
// See COPYING for details
using System;
using System.Runtime.InteropServices;
using static Tmds.Linux.LibC;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
static class ErrorInterop
{
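// Uses the thread-safe strerror_r instead of strerror, falling back to the
// raw errno value when no message can be produced.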
public static unsafe string StrError(int errno)
{
int maxBufferLength = 1024; // should be long enough for most any UNIX error
byte* buffer = stackalloc byte[maxBufferLength];
int rv = Tmds.Linux.LibC.strerror_r(errno, buffer, maxBufferLength);
return rv == 0 ? Marshal.PtrToStringAnsi((IntPtr)buffer) : $"errno={errno}";
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/ITransportActionHandler.cs<|end_filename|>
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
interface ITransportActionHandler
{
Task BindAsync();
Task UnbindAsync();
Task StopAsync();
ValueTask<ConnectionContext> AcceptAsync(CancellationToken cancellationToken = default);
}
}
<|start_filename|>externals/MemoryPoolThrowHelper.cs<|end_filename|>
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Text;
namespace System.Buffers
{
internal class MemoryPoolThrowHelper
{
public static void ThrowArgumentOutOfRangeException_BufferRequestTooLarge(int maxSize)
{
throw GetArgumentOutOfRangeException_BufferRequestTooLarge(maxSize);
}
public static void ThrowObjectDisposedException(ExceptionArgument argument)
{
throw GetObjectDisposedException(argument);
}
[MethodImpl(MethodImplOptions.NoInlining)]
private static ArgumentOutOfRangeException GetArgumentOutOfRangeException_BufferRequestTooLarge(int maxSize)
{
return new ArgumentOutOfRangeException(GetArgumentName(ExceptionArgument.size), $"Cannot allocate more than {maxSize} bytes in a single buffer");
}
[MethodImpl(MethodImplOptions.NoInlining)]
private static ObjectDisposedException GetObjectDisposedException(ExceptionArgument argument)
{
return new ObjectDisposedException(GetArgumentName(argument));
}
private static string GetArgumentName(ExceptionArgument argument)
{
Debug.Assert(Enum.IsDefined(typeof(ExceptionArgument), argument), "The enum value is not defined, please check the ExceptionArgument Enum.");
return argument.ToString();
}
internal enum ExceptionArgument
{
size,
offset,
length,
MemoryPoolBlock,
MemoryPool
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/TransportFactory.cs<|end_filename|>
using System;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
public class LinuxTransportFactory : IConnectionListenerFactory
{
private LinuxTransportOptions _options;
private ILoggerFactory _loggerFactory;
public LinuxTransportFactory(IOptions<LinuxTransportOptions> options, ILoggerFactory loggerFactory)
{
if (options == null)
{
throw new ArgumentNullException(nameof(options));
}
if (loggerFactory == null)
{
throw new ArgumentNullException(nameof(loggerFactory));
}
_options = options.Value;
_loggerFactory = loggerFactory;
}
public async ValueTask<IConnectionListener> BindAsync(EndPoint endpoint, CancellationToken cancellationToken = default)
{
var transport = new Transport(endpoint, _options, _loggerFactory);
await transport.BindAsync();
return transport;
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/AddressNotAvailableException.cs<|end_filename|>
using System;
using System.Runtime.Serialization;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
public class AddressNotAvailableException : Exception
{
public AddressNotAvailableException()
{
}
protected AddressNotAvailableException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
public AddressNotAvailableException(string message)
: base(message)
{
}
public AddressNotAvailableException(string message, Exception innerException)
: base(message, innerException)
{
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/CpuSet.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.ComponentModel;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
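// A set of logical cpu ids parsed from Linux cpu-list syntax, e.g. "0-2,4"
// yields {0,1,2,4}.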
[TypeConverter(typeof(CpuSetTypeConverter))]
internal struct CpuSet
{
int[] _cpus;
public int[] Cpus => _cpus ?? Array.Empty<int>();
public bool IsEmpty => _cpus == null || _cpus.Length == 0;
private CpuSet(int[] cpus)
{
_cpus = cpus;
}
private static bool ParseFailed(bool tryParse, string error)
{
if (tryParse)
{
return false;
}
throw new FormatException(error);
}
public static bool Parse(string set, out CpuSet cpus, bool tryParse)
{
cpus = default(CpuSet);
if (set == null)
{
if (tryParse)
{
return false;
}
throw new ArgumentNullException(nameof(set));
}
if (set.Length == 0)
{
cpus = new CpuSet(Array.Empty<int>());
return true;
}
int index = 0;
var cpuList = new List<int>();
do
{
int start;
if (!TryParseNumber(set, ref index, out start))
{
return ParseFailed(tryParse, $"Can not parse number at {index}");
}
if (index == set.Length)
{
cpuList.Add(start);
break;
}
else if (set[index] == ',')
{
cpuList.Add(start);
index++;
continue;
}
else if (set[index] == '-')
{
index++;
int end;
if (!TryParseNumber(set, ref index, out end))
{
return ParseFailed(tryParse, $"Can not parse number at {index}");
}
if (start > end)
{
return ParseFailed(tryParse, "End of range is larger than start");
}
for (int i = start; i <= end; i++)
{
cpuList.Add(i);
}
if (index == set.Length)
{
break;
}
else if (set[index] == ',')
{
index++;
continue;
}
else
{
return ParseFailed(tryParse, $"Invalid character at {index}: '{set[index]}'");
}
}
else
{
return ParseFailed(tryParse, $"Invalid character at {index}: '{set[index]}'");
}
} while (index != set.Length);
var cpuArray = cpuList.ToArray();
Array.Sort(cpuArray);
cpus = new CpuSet(cpuArray);
return true;
}
public static bool TryParse(string set, out CpuSet cpus)
{
return Parse(set, out cpus, tryParse: true);
}
public static CpuSet Parse(string set)
{
CpuSet cpus;
Parse(set, out cpus, tryParse: false);
return cpus;
}
private static bool TryParseNumber(string s, ref int index, out int value)
{
if (index == s.Length)
{
value = 0;
return false;
}
int startIndex = index;
while (index < s.Length && Char.IsDigit(s[index])) { index++; }
return int.TryParse(s.Substring(startIndex, index - startIndex), out value);
}
public override string ToString()
{
return _cpus == null ? string.Empty : string.Join(",", _cpus);
}
}
}
<|start_filename|>samples/KestrelSample/Program.cs<|end_filename|>
using System;
using System.IO;
using System.IO.Pipelines;
using System.Linq;
using System.Net;
using System.Runtime;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Console;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal;
using Benchmarks.Middleware;
using RedHat.AspNetCore.Server.Kestrel.Transport.Linux;
namespace SampleApp
{
public class Startup
{
IConfiguration Configuration;
static bool _log;
public Startup()
{
var configBuilder = new ConfigurationBuilder()
.SetBasePath(Directory.GetCurrentDirectory())
.AddJsonFile("appsettings.json", optional: false, reloadOnChange: false);
Configuration = configBuilder.Build();
}
public void ConfigureServices(IServiceCollection services)
{
if (_log)
{
services.AddLogging(builder =>
{
builder.AddConfiguration(Configuration.GetSection("Logging"))
.SetMinimumLevel(LogLevel.Debug)
.AddConsole();
});
}
}
public void Configure(IApplicationBuilder app)
{
app.UsePlainText();
app.UseJson();
app.Run(async context =>
{
var response = $"hello, world{Environment.NewLine}";
context.Response.ContentLength = response.Length;
context.Response.ContentType = "text/plain";
await context.Response.WriteAsync(response);
});
}
public static void Main(string[] args)
{
if (args.Contains("--help"))
{
Console.WriteLine("Options: [libuv] [-c<cpuset>] [-t<threadcount>] [ta] [ic] [noda] [nott]");
Console.WriteLine(" General:");
Console.WriteLine("\tlibuv Use libuv Transport instead of Linux Transport");
Console.WriteLine("\tsock Use Sockets Transport instead of Linux Transport");
Console.WriteLine("\t-t<tc> Number of transport threads");
Console.WriteLine("\t-z<th> Threshold for using zero-copy");
Console.WriteLine("\tnott Defer requests to thread pool");
Console.WriteLine(" Linux transport specific:");
// Console.WriteLine("\tta Set thread affinity");
// Console.WriteLine("\tic Receive on incoming cpu (implies ta)");
// Console.WriteLine("\t-c<cpus> Cpus for transport threads (implies ta, count = default for -t)");
Console.WriteLine("\tnoda No deferred accept");
Console.WriteLine("\tnods No deferred send");
Console.WriteLine("\taior Receive using Linux aio");
Console.WriteLine("\taios Send using Linux aio");
return;
}
TaskScheduler.UnobservedTaskException += (sender, e) =>
{
Console.WriteLine("Unobserved exception: {0}", e.Exception);
};
bool libuv = args.Contains("libuv");
bool sock = args.Contains("sock");
// bool ta = args.Contains("ta");
// bool ic = args.Contains("ic");
bool ds = !args.Contains("nods");
bool da = !args.Contains("noda");
bool tt = !args.Contains("nott");
bool aior = args.Contains("aior");
bool aios = args.Contains("aios");
_log = args.Contains("log");
int threadCount = 0;
int zeroCopyThreshold = LinuxTransportOptions.NoZeroCopy;
// CpuSet cpuSet = default(CpuSet);
foreach (var arg in args)
{
// if (arg.StartsWith("-c"))
// {
// cpuSet = CpuSet.Parse(arg.Substring(2));
// ta = true;
// }
//else
if (arg.StartsWith("-t"))
{
threadCount = int.Parse(arg.Substring(2));
}
else if (arg.StartsWith("-z"))
{
zeroCopyThreshold = int.Parse(arg.Substring(2));
}
}
// if (ic)
// {
// ta = true;
// }
if (threadCount == 0)
{
// threadCount = (libuv || cpuSet.IsEmpty) ? Environment.ProcessorCount : cpuSet.Cpus.Length;
threadCount = Environment.ProcessorCount;
}
Console.WriteLine($"Server GC is {(GCSettings.IsServerGC ? "enabled" : "disabled")}");
if (libuv)
{
Console.WriteLine($"Using Libuv: ThreadCount={threadCount}, UseTransportThread={tt}");
}
else if (sock)
{
System.Console.WriteLine($"Using Sockets: IOQueueCount={threadCount}");
}
else
{
// Console.WriteLine($"Using Linux Transport: Cpus={cpuSet}, ThreadCount={threadCount}, IncomingCpu={ic}, SetThreadAffinity={ta}, DeferAccept={da}, UseTransportThread={tt}");
Console.WriteLine($"Using Linux Transport: ThreadCount={threadCount}, DeferAccept={da}, UseTransportThread={tt}, ZeroCopyThreshold={zeroCopyThreshold}, DeferSend={ds}");
}
var hostBuilder = new WebHostBuilder()
.UseKestrel(options =>
{
options.AllowSynchronousIO = true;
})
.UseStartup<Startup>();
if (libuv)
{
hostBuilder = hostBuilder.UseLibuv(options => options.ThreadCount = threadCount);
}
else if (sock)
{
hostBuilder = hostBuilder.UseSockets(options => options.IOQueueCount = threadCount);
}
else
{
hostBuilder = hostBuilder.UseLinuxTransport(options =>
{
options.ThreadCount = threadCount;
options.ApplicationSchedulingMode = tt ? PipeScheduler.Inline : PipeScheduler.ThreadPool;
//options.SetThreadAffinity = ta;
//options.ReceiveOnIncomingCpu = ic;
options.DeferAccept = da;
options.DeferSend = ds;
options.ZeroCopyThreshold = zeroCopyThreshold;
options.ZeroCopy = true;
options.AioReceive = aior;
options.AioSend = aios;
//options.CpuSet = cpuSet;
});
}
var host = hostBuilder.Build();
host.Run();
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/Assembly.cs<|end_filename|>
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("RedHat.AspNetCore.Server.Kestrel.Transport.Linux.Test")]
[assembly: InternalsVisibleTo("RedHat.AspNetCore.Server.Kestrel.Transport.Linux.TestApp")]
[assembly: InternalsVisibleTo("KestrelSample")]
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/PosixResult.Values.cs<|end_filename|>
// Copyright 2017 <NAME> <<EMAIL>>
// This software is made available under the MIT License
// See COPYING for details
using System.Collections.Generic;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
internal partial struct PosixResult
{
public static int EAFNOSUPPORT => -Tmds.Linux.LibC.EAFNOSUPPORT;
public static int EAGAIN => -Tmds.Linux.LibC.EAGAIN;
public static int ECONNABORTED => -Tmds.Linux.LibC.ECONNABORTED;
public static int ECONNRESET => -Tmds.Linux.LibC.ECONNRESET;
public static int EINVAL => -Tmds.Linux.LibC.EINVAL;
public static int ENOBUFS => -Tmds.Linux.LibC.ENOBUFS;
public static int EPIPE => -Tmds.Linux.LibC.EPIPE;
public static int ECONNREFUSED => -Tmds.Linux.LibC.ECONNREFUSED;
public static int EADDRINUSE => -Tmds.Linux.LibC.EADDRINUSE;
public static int EADDRNOTAVAIL => -Tmds.Linux.LibC.EADDRNOTAVAIL;
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/PipeEnd.cs<|end_filename|>
using System;
using System.Runtime.InteropServices;
using static Tmds.Linux.LibC;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
struct PipeEndPair
{
public PipeEnd ReadEnd;
public PipeEnd WriteEnd;
public void Dispose()
{
ReadEnd?.Dispose();
WriteEnd?.Dispose();
}
}
static class PipeInterop
{
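// Creates both ends of a pipe with pipe2(2); the descriptors are always
// CLOEXEC and optionally non-blocking.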
public unsafe static PosixResult Pipe(out PipeEnd readEnd, out PipeEnd writeEnd, bool blocking)
{
int* fds = stackalloc int[2];
int flags = O_CLOEXEC;
if (!blocking)
{
flags |= O_NONBLOCK;
}
readEnd = new PipeEnd();
writeEnd = new PipeEnd();
int res = pipe2(fds, flags);
if (res == 0)
{
readEnd.SetHandle(fds[0]);
writeEnd.SetHandle(fds[1]);
}
else
{
readEnd = null;
writeEnd = null;
}
return PosixResult.FromReturnValue(res);
}
}
class PipeEnd : CloseSafeHandle
{
internal PipeEnd()
{}
public void WriteByte(byte b)
{
TryWriteByte(b)
.ThrowOnError();
}
public unsafe PosixResult TryWriteByte(byte b)
{
return base.TryWrite(&b, 1);
}
public unsafe byte ReadByte()
{
byte b = 0;
var result = base.TryRead(&b, 1);
result.ThrowOnError();
return b;
}
public unsafe PosixResult TryReadByte()
{
byte b;
var result = base.TryRead(&b, 1);
if (result.IsSuccess)
{
return new PosixResult(b);
}
else
{
return result;
}
}
public int Write(ArraySegment<byte> buffer)
{
var result = TryWrite(buffer);
result.ThrowOnError();
return result.IntValue;
}
public new PosixResult TryWrite(ArraySegment<byte> buffer)
{
return base.TryWrite(buffer);
}
public int Read(ArraySegment<byte> buffer)
{
var result = TryRead(buffer);
result.ThrowOnError();
return result.IntValue;
}
public new PosixResult TryRead(ArraySegment<byte> buffer)
{
return base.TryRead(buffer);
}
public static PipeEndPair CreatePair(bool blocking)
{
PipeEnd readEnd;
PipeEnd writeEnd;
var result = PipeInterop.Pipe(out readEnd, out writeEnd, blocking);
result.ThrowOnError();
return new PipeEndPair { ReadEnd = readEnd, WriteEnd = writeEnd };
}
}
}
<|start_filename|>test/RedHat.AspNetCore.Server.Kestrel.Transport.Linux.Test/TransportTests.cs<|end_filename|>
using System;
using System.Buffers;
using System.IO;
using System.IO.Pipelines;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using RedHat.AspNetCore.Server.Kestrel.Transport.Linux;
using Xunit;
using static Tmds.Linux.LibC;
namespace Tests
{
public abstract class TransportTestsBase
{
protected abstract TestServerOptions CreateOptions();
private TestServer CreateTestServer(Action<TestServerOptions> configure = null)
{
TestServerOptions options = CreateOptions();
configure?.Invoke(options);
return new TestServer(options);
}
private TestServer CreateTestServer(TestServerConnectionDispatcher connectionDispatcher)
=> CreateTestServer(options => options.ConnectionDispatcher = connectionDispatcher);
[InlineData(true)]
[InlineData(false)]
[Theory]
public async Task Echo_DeferAccept(bool deferAccept)
{
using (var testServer = CreateTestServer(options => options.DeferAccept = deferAccept))
{
await testServer.BindAsync();
using (var client = testServer.ConnectTo())
{
// Send some bytes
byte[] sendBuffer = new byte[] { 1, 2, 3 };
client.Send(new ArraySegment<byte>(sendBuffer));
// Read the echo
byte[] receiveBuffer = new byte[10];
var received = client.Receive(new ArraySegment<byte>(receiveBuffer));
Assert.Equal(sendBuffer.Length, received);
}
await testServer.StopAsync();
}
}
[Fact]
public async Task MultiThread()
{
using (var testServer = CreateTestServer(options => options.ThreadCount = 2))
{
await testServer.BindAsync();
await testServer.UnbindAsync();
await testServer.StopAsync();
}
}
[Fact]
public async Task Unbind()
{
using (var testServer = CreateTestServer())
{
await testServer.BindAsync();
await testServer.UnbindAsync();
var exception = Assert.Throws<IOException>(() => testServer.ConnectTo());
Assert.Equal(ECONNREFUSED, exception.HResult);
await testServer.StopAsync();
}
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public async Task StopDisconnectsClient(bool waitForAccept)
{
var clientAcceptedTcs = new TaskCompletionSource<object>();
TestServerConnectionDispatcher connectionDispatcher = (input, output, context) =>
{
clientAcceptedTcs.SetResult(output);
return Task.Delay(int.MaxValue, context.ConnectionClosed);
};
using (var testServer = CreateTestServer(connectionDispatcher))
{
await testServer.BindAsync();
using (var client = testServer.ConnectTo())
{
// Make sure the client is accepted.
if (waitForAccept)
{
await clientAcceptedTcs.Task;
}
// Server shutdown:
await testServer.UnbindAsync();
await testServer.StopAsync();
byte[] receiveBuffer = new byte[10];
var received = client.Receive(new ArraySegment<byte>(receiveBuffer));
// Socket was accepted by the server, which will do a normal TCP close.
Assert.Equal(0, received);
// send returns EPIPE
var exception = Assert.Throws<IOException>(() =>
{
for (int i = 0; i < 10; i++)
{
byte[] sendBuffer = new byte[] { 1, 2, 3 };
client.Send(new ArraySegment<byte>(sendBuffer));
}
});
Assert.Equal(EPIPE, exception.HResult);
}
await testServer.StopAsync();
}
}
[Fact]
public async Task Writable()
{
const int bufferSize = 2048;
int bytesWritten = 0;
var waitingForWritable = new TaskCompletionSource<object>();
TestServerConnectionDispatcher connectionDispatcher = async (input, output, _) =>
{
Timer writeTimeout = new Timer(
// timeout -> we are waiting for the socket to become writable
o => waitingForWritable.SetResult(null),
null, Timeout.Infinite, Timeout.Infinite
);
do
{
var memory = output.GetMemory(bufferSize);
output.Advance(bufferSize);
bytesWritten += bufferSize;
// If it takes 1 second to write, assume the socket
// is no longer writable
writeTimeout.Change(1000, Timeout.Infinite);
await output.FlushAsync();
// cancel the timeout
writeTimeout.Change(Timeout.Infinite, Timeout.Infinite);
} while (!waitingForWritable.Task.IsCompleted);
writeTimeout.Dispose();
output.Complete();
input.Complete();
};
using (var testServer = CreateTestServer(connectionDispatcher))
{
await testServer.BindAsync();
using (var client = testServer.ConnectTo())
{
// wait for the server to have sent so much data
// that it is waiting for us to read some
await waitingForWritable.Task;
// read all the data
int receivedBytes = 0;
byte[] receiveBuffer = new byte[bufferSize];
while (receivedBytes < bytesWritten)
{
var received = client.Receive(new ArraySegment<byte>(receiveBuffer));
receivedBytes += received;
}
}
await testServer.StopAsync();
}
}
[Fact]
public async Task Write_Timeout()
{
const int bufferSize = 2048;
var waitingForTimeout = new TaskCompletionSource<object>();
TestServerConnectionDispatcher connectionDispatcher = async (input, output, connection) =>
{
Timer writeTimeout = new Timer(
_ => connection.Abort(),
null, Timeout.Infinite, Timeout.Infinite
);
do
{
var memory = output.GetMemory(bufferSize);
output.Advance(bufferSize);
// If it takes 1 second to write, assume the socket
// is no longer writable
writeTimeout.Change(1000, Timeout.Infinite);
var flushResult = await output.FlushAsync();
if (flushResult.IsCanceled || flushResult.IsCompleted)
{
break;
}
// cancel the timeout
writeTimeout.Change(Timeout.Infinite, Timeout.Infinite);
} while (true);
waitingForTimeout.SetResult(null);
writeTimeout.Dispose();
output.Complete();
input.Complete();
};
using (var testServer = CreateTestServer(connectionDispatcher))
{
await testServer.BindAsync();
using (var client = testServer.ConnectTo())
{
// wait for the server to timeout our connection
// because we aren't reading
await waitingForTimeout.Task;
}
await testServer.StopAsync();
}
}
[Fact]
public async Task CompletingOutputCancelsInput()
{
var inputCompletedTcs = new TaskCompletionSource<object>();
TestServerConnectionDispatcher connectionDispatcher = async (input, output, _) =>
{
output.Complete();
bool exceptionThrown = false;
try
{
await input.ReadAsync();
}
catch (ConnectionAbortedException)
{
exceptionThrown = true;
}
Assert.True(exceptionThrown);
inputCompletedTcs.SetResult(null);
};
using (var testServer = CreateTestServer(connectionDispatcher))
{
await testServer.BindAsync();
using (var client = testServer.ConnectTo())
{
await inputCompletedTcs.Task;
}
await testServer.StopAsync();
}
}
[Fact]
public async Task Receive()
{
// client sends 1M bytes which are an int counter
// server receives and checks the counting
const int receiveLength = 1000000;
TestServerConnectionDispatcher connectionDispatcher = async (input, output, _) =>
{
int bytesReceived = 0;
int remainder = 0; // remaining bytes between ReadableBuffers
while (true)
{
var readResult = await input.ReadAsync();
var buffer = readResult.Buffer;
if (buffer.IsEmpty && readResult.IsCompleted)
{
input.AdvanceTo(buffer.End);
break;
}
AssertCounter(ref buffer, ref bytesReceived, ref remainder);
input.AdvanceTo(buffer.End);
}
Assert.Equal(receiveLength, bytesReceived);
output.Complete();
input.Complete();
};
using (var testServer = CreateTestServer(connectionDispatcher))
{
await testServer.BindAsync();
using (var client = testServer.ConnectTo())
{
var buffer = new byte[receiveLength];
FillBuffer(new ArraySegment<byte>(buffer), 0);
int offset = 0;
do
{
offset += client.Send(new ArraySegment<byte>(buffer, offset, buffer.Length - offset));
} while (offset != buffer.Length);
client.Shutdown(SHUT_WR);
// wait for the server to stop
var receiveBuffer = new byte[1];
client.Receive(new ArraySegment<byte>(receiveBuffer));
}
await testServer.StopAsync();
}
}
[Fact]
public async Task Send()
{
// server sends 1M bytes which are an int counter
// client receives and checks the counting
const int sendLength = 1_000_000;
TestServerConnectionDispatcher connectionDispatcher = async (input, output, _) =>
{
FillBuffer(output, sendLength / 4);
await output.FlushAsync();
output.Complete();
input.Complete();
};
using (var testServer = CreateTestServer(connectionDispatcher))
{
await testServer.BindAsync();
using (var client = testServer.ConnectTo())
{
int totalReceived = 0;
var receiveBuffer = new byte[4000];
bool eof = false;
do
{
int offset = 0;
int received = 0;
do
{
var receive = client.Receive(new ArraySegment<byte>(receiveBuffer, offset, receiveBuffer.Length - offset));
received += receive;
offset += receive;
eof = receive == 0;
} while (!eof && offset != receiveBuffer.Length);
AssertCounter(new ArraySegment<byte>(receiveBuffer, 0, received), totalReceived / 4);
totalReceived += received;
} while (!eof);
Assert.Equal(sendLength, totalReceived);
}
await testServer.StopAsync();
}
}
[Fact]
public async Task UnixSocketListenType()
{
TestServerConnectionDispatcher connectionDispatcher = async (input, output, _) =>
{
int threadId = Thread.CurrentThread.ManagedThreadId;
var data = Encoding.UTF8.GetBytes(threadId.ToString());
output.Write(data);
await output.FlushAsync();
output.Complete();
input.Complete();
};
using (var testServer = CreateTestServer(options =>
{ options.ConnectionDispatcher = connectionDispatcher;
options.ThreadCount = 2;
options.ApplicationSchedulingMode = PipeScheduler.Inline;
options.UnixSocketPath = $"{Path.GetTempPath()}/{Path.GetRandomFileName()}"; }))
{
await testServer.BindAsync();
// This test is racy, it's ok to ignore occasional failure.
// PipeScheduler.Inline isn't inline when there is no pending IConnectionListener.AcceptAsync.
// Wait a little to increase probability of AcceptAsync.
await Task.Delay(50);
int[] threadIds = new int[4];
for (int i = 0; i < 4; i++)
{
using (var client = testServer.ConnectTo())
{
byte[] receiveBuffer = new byte[10];
int received = client.Receive(new ArraySegment<byte>(receiveBuffer));
int threadId;
Assert.NotEqual(0, received);
Assert.True(int.TryParse(Encoding.UTF8.GetString(receiveBuffer, 0, received), out threadId));
threadIds[i] = threadId;
// check if the server closed the client.
// this would fail if not all fds for this client are closed
received = client.Receive(new ArraySegment<byte>(receiveBuffer));
Assert.Equal(0, received);
}
// See earlier comment, wait a little to increase probability of AcceptAsync.
await Task.Delay(50);
}
// check we are doing round robin over 2 handling threads
Assert.NotEqual(threadIds[0], threadIds[1]);
Assert.Equal(threadIds[0], threadIds[2]);
Assert.Equal(threadIds[1], threadIds[3]);
await testServer.StopAsync();
}
}
[Fact]
public async Task FailedBindThrows()
{
int port = 50; // port in the privileged range (< 1024), so binding is expected to fail for a non-root test run
using (var testServer = CreateTestServer(options =>
options.IPEndPoint = new IPEndPoint(IPAddress.Loopback, port)))
{
await Assert.ThrowsAnyAsync<Exception>(() => testServer.BindAsync());
await testServer.StopAsync();
}
}
[Fact]
public async Task BatchedSendReceive()
{
// We block the TransportThread to ensure 2 clients are sending multiple buffers with random data.
// These buffers are echoed back.
// The clients verify they each receive the random data they sent.
const int DataLength = 10_000;
int connectionCount = 0;
SemaphoreSlim clientsAcceptedSemaphore = new SemaphoreSlim(0, 1);
SemaphoreSlim dataSentSemaphore = new SemaphoreSlim(0, 1);
TestServerConnectionDispatcher connectionDispatcher = async (input, output, _) =>
{
connectionCount++;
if (connectionCount == 3)
{
// Ensure we accepted the clients.
clientsAcceptedSemaphore.Release();
// Now wait for clients to send data.
dataSentSemaphore.Wait();
}
// Echo
while (true)
{
var result = await input.ReadAsync();
var request = result.Buffer;
if (request.IsEmpty && result.IsCompleted)
{
input.AdvanceTo(request.End);
break;
}
// Clients send more data than what fits in a single segment.
Assert.False(request.IsSingleSegment);
foreach (var memory in request)
{
output.Write(memory.Span);
}
await output.FlushAsync();
input.AdvanceTo(request.End);
}
output.Complete();
input.Complete();
};
using (var testServer = CreateTestServer(connectionDispatcher))
{
await testServer.BindAsync();
using (var client1 = testServer.ConnectTo())
{
using (var client2 = testServer.ConnectTo())
{
using (var client3 = testServer.ConnectTo())
{ }
// Wait for all clients to be accepted.
// The TransportThread will now be blocked.
clientsAcceptedSemaphore.Wait();
// Send data
var client1DataSent = new byte[DataLength];
FillRandom(client1DataSent);
var client2DataSent = new byte[DataLength];
FillRandom(client2DataSent);
int bytesSent = client1.Send(new ArraySegment<byte>(client1DataSent));
Assert.Equal(DataLength, bytesSent);
bytesSent = client2.Send(new ArraySegment<byte>(client2DataSent));
Assert.Equal(DataLength, bytesSent);
// Unblock the TransportThread
dataSentSemaphore.Release();
// Receive echoed data.
var client1DataReceived = new byte[DataLength];
int bytesReceived = client1.Receive(new ArraySegment<byte>(client1DataReceived));
Assert.Equal(DataLength, bytesReceived);
Assert.Equal(client1DataSent, client1DataReceived);
var client2DataReceived = new byte[DataLength];
bytesReceived = client2.Receive(new ArraySegment<byte>(client2DataReceived));
Assert.Equal(DataLength, bytesReceived);
Assert.Equal(client2DataSent, client2DataReceived);
}
}
await testServer.StopAsync();
}
}
private static readonly Random s_random = new Random();
private void FillRandom(byte[] data)
{
lock (s_random)
{
for (int i = 0; i < data.Length; i++)
{
data[i] = (byte)s_random.Next(256);
}
}
}
private unsafe static void FillBuffer(PipeWriter writer, int count)
{
for (int i = 0; i < count; i++)
{
var memory = writer.GetMemory(4);
var bufferHandle = memory.Pin();
void* pointer = bufferHandle.Pointer;
*(int*)pointer = i;
bufferHandle.Dispose();
writer.Advance(4);
}
}
private unsafe static void FillBuffer(ArraySegment<byte> segment, int value)
{
Assert.True(segment.Count % 4 == 0);
fixed (byte* bytePtr = segment.Array)
{
int* intPtr = (int*)(bytePtr + segment.Offset);
for (int i = 0; i < segment.Count / 4; i++)
{
*intPtr++ = value++;
}
}
}
private unsafe static void AssertCounter(ArraySegment<byte> segment, int value)
{
Assert.True(segment.Count % 4 == 0);
fixed (byte* bytePtr = segment.Array)
{
int* intPtr = (int*)(bytePtr + segment.Offset);
for (int i = 0; i < segment.Count / 4; i++)
{
Assert.Equal(value++, *intPtr++);
}
}
}
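// The overload below checks the counter across segment boundaries: an int may
// be split over two Memory segments, so up to 3 partial bytes are carried in
// 'remainder' between segments. Worked example (little-endian): a segment
// ending in [0x05, 0x00] followed by one starting with [0x00, 0x00] is
// stitched back into the int 5 before comparison.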
private static unsafe void AssertCounter(ref ReadOnlySequence<byte> buffer, ref int bytesReceived, ref int remainderRef)
{
int remainder = remainderRef;
int currentValue = bytesReceived / 4;
foreach (var memory in buffer)
{
var bufferHandle = memory.Pin();
void* pointer = bufferHandle.Pointer;
byte* pMemory = (byte*)pointer;
int length = memory.Length;
// remainder
int offset = bytesReceived % 4;
if (offset != 0)
{
int read = Math.Min(length, 4 - offset);
byte* ptr = (byte*)&remainder;
ptr += offset;
for (int i = 0; i < read; i++)
{
*ptr++ = *pMemory++;
}
length -= read;
if (read == (4 - offset))
{
Assert.Equal(currentValue++, remainder);
}
}
// whole ints
int* pMemoryInt = (int*)pMemory;
int count = length / 4;
for (int i = 0; i < count; i++)
{
Assert.Equal(currentValue++, *pMemoryInt++);
length -= 4;
}
// remainder
if (length != 0)
{
pMemory = (byte*)pMemoryInt;
byte* ptr = (byte*)&remainder;
for (int i = 0; i < length; i++)
{
*ptr++ = *pMemory++;
}
}
bytesReceived += memory.Length;
bufferHandle.Dispose();
}
remainderRef = remainder;
}
}
public sealed class DefaultOptionsTransportTests : TransportTestsBase
{
protected override TestServerOptions CreateOptions() => new TestServerOptions();
}
public sealed class AioTransportTests : TransportTestsBase
{
protected override TestServerOptions CreateOptions()
=> new TestServerOptions()
{
AioReceive = true,
AioSend = true
};
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/TransportThread.cs<|end_filename|>
using System;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.Extensions.Logging;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
sealed partial class TransportThread : ITransportActionHandler
{
private readonly object _gate = new object();
private TransportThreadState _state;
private TaskCompletionSource<object> _stateChangeCompletion;
private Thread _thread;
private ThreadContext _threadContext;
public int ThreadId { get; }
public IPEndPoint EndPoint { get; }
public LinuxTransportOptions TransportOptions { get; }
public AcceptThread AcceptThread { get; }
public ILoggerFactory LoggerFactory { get; }
public int CpuId { get; }
public TransportThread(IPEndPoint endPoint, LinuxTransportOptions options, AcceptThread acceptThread, int threadId, int cpuId, ILoggerFactory loggerFactory)
{
ThreadId = threadId;
CpuId = cpuId;
EndPoint = endPoint;
TransportOptions = options;
AcceptThread = acceptThread;
LoggerFactory = loggerFactory;
}
public Task BindAsync()
{
TaskCompletionSource<object> tcs;
lock (_gate)
{
if (_state == TransportThreadState.Started)
{
return Task.CompletedTask;
}
else if (_state == TransportThreadState.Starting)
{
return _stateChangeCompletion.Task;
}
else if (_state != TransportThreadState.Initial)
{
ThrowInvalidState();
}
try
{
tcs = _stateChangeCompletion = new TaskCompletionSource<object>();
_state = TransportThreadState.Starting;
_thread = new Thread(PollThread);
_thread.IsBackground = true;
_thread.Start();
}
catch
{
_state = TransportThreadState.Stopped;
throw;
}
}
return tcs.Task;
}
public async Task UnbindAsync()
{
TaskCompletionSource<object> tcs = null;
lock (_gate)
{
if (_state == TransportThreadState.Initial)
{
_state = TransportThreadState.Stopped;
return;
}
else if (_state == TransportThreadState.AcceptClosed || _state == TransportThreadState.Stopping || _state == TransportThreadState.Stopped)
{
return;
}
else if (_state == TransportThreadState.ClosingAccept)
{
tcs = _stateChangeCompletion;
}
}
if (tcs != null)
{
await tcs.Task;
return;
}
try
{
await BindAsync();
}
catch
{}
bool triggerStateChange = false;
lock (_gate)
{
if (_state == TransportThreadState.AcceptClosed || _state == TransportThreadState.Stopping || _state == TransportThreadState.Stopped)
{
return;
}
else if (_state == TransportThreadState.ClosingAccept)
{
tcs = _stateChangeCompletion;
}
else if (_state == TransportThreadState.Started)
{
triggerStateChange = true;
tcs = _stateChangeCompletion = new TaskCompletionSource<object>();
_state = TransportThreadState.ClosingAccept;
}
else
{
// Cannot happen
ThrowInvalidState();
}
}
if (triggerStateChange)
{
_threadContext.RequestCloseAccept();
}
await tcs.Task;
}
public async Task StopAsync()
{
lock (_gate)
{
if (_state == TransportThreadState.Initial)
{
_state = TransportThreadState.Stopped;
return;
}
}
await UnbindAsync();
TaskCompletionSource<object> tcs = null;
bool triggerStateChange = false;
lock (_gate)
{
if (_state == TransportThreadState.Stopped)
{
return;
}
else if (_state == TransportThreadState.Stopping)
{
tcs = _stateChangeCompletion;
}
else if (_state == TransportThreadState.AcceptClosed)
{
tcs = _stateChangeCompletion = new TaskCompletionSource<object>();
_state = TransportThreadState.Stopping;
triggerStateChange = true;
}
else
{
// Cannot happen
ThrowInvalidState();
}
}
if (triggerStateChange)
{
_threadContext.RequestStopSockets();
}
await tcs.Task;
}
private unsafe void PollThread(object obj)
{
if (CpuId != -1)
{
SystemScheduler.SetCurrentThreadAffinity(CpuId);
}
using (ThreadContext context = new ThreadContext(this))
{
_threadContext = context;
context.Run();
}
}
private void ThrowInvalidState()
{
throw new InvalidOperationException($"nameof(TransportThread) is {_state}");
}
private void CompleteStateChange(TransportThreadState state, Exception error)
{
TaskCompletionSource<object> tcs;
lock (_gate)
{
tcs = _stateChangeCompletion;
_stateChangeCompletion = null;
_state = state;
}
ThreadPool.QueueUserWorkItem(o =>
{
if (error != null)
{
tcs?.SetException(error);
}
else
{
tcs?.SetResult(null);
}
});
}
public async ValueTask<ConnectionContext> AcceptAsync(CancellationToken cancellationToken = default)
{
ValueTask<TSocket> acceptTask;
lock (_gate)
{
if (_state > TransportThreadState.ClosingAccept)
{
return null;
}
else if (_state < TransportThreadState.Started)
{
ThrowInvalidState();
}
acceptTask = _threadContext.AcceptAsync(cancellationToken);
}
return await acceptTask;
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/TransportConstants.cs<|end_filename|>
using System;
using System.Net;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
static class TransportConstants
{
public const int MaxEAgainCount = 10;
public static readonly PosixResult TooManyEAgain = new PosixResult(int.MinValue);
public static readonly Exception StopSentinel = new Exception();
}
}
<|start_filename|>test/RedHat.AspNetCore.Server.Kestrel.Transport.Linux.TestApp/Program.cs<|end_filename|>
using System;
using RedHat.AspNetCore.Server.Kestrel.Transport.Linux;
namespace ConsoleApplication
{
public class Program
{
public static void Main(string[] args)
{
// Friend Application
}
}
}
<|start_filename|>test/RedHat.AspNetCore.Server.Kestrel.Transport.Linux.Test/TransportTests.TestServer.cs<|end_filename|>
using System;
using System.Buffers;
using System.IO.Pipelines;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.AspNetCore.Connections.Features;
using Microsoft.Extensions.Logging;
using RedHat.AspNetCore.Server.Kestrel.Transport.Linux;
using Xunit;
using static Tmds.Linux.LibC;
using Socket = RedHat.AspNetCore.Server.Kestrel.Transport.Linux.Socket;
namespace Tests
{
public delegate Task TestServerConnectionDispatcher(PipeReader input, PipeWriter output, ConnectionContext connection);
public class TestServerOptions
{
public int ThreadCount { get; set; } = 1;
public bool DeferAccept { get; set; } = false;
public TestServerConnectionDispatcher ConnectionDispatcher { get; set; } = TestServer.Echo;
public string UnixSocketPath { get; set; }
public IPEndPoint IPEndPoint { get; set; }
public bool AioSend { get; set; } = false;
public bool AioReceive { get; set; } = false;
public PipeScheduler ApplicationSchedulingMode { get; set; } = PipeScheduler.ThreadPool;
}
internal class DuplexPipe : IDuplexPipe
{
public DuplexPipe(PipeReader reader, PipeWriter writer)
{
Input = reader;
Output = writer;
}
public PipeReader Input { get; }
public PipeWriter Output { get; }
}
class TestServer : IDisposable
{
private Transport _transport;
private IPEndPoint _serverAddress;
private string _unixSocketPath;
private TestServerConnectionDispatcher _connectionDispatcher;
private Task _acceptLoopTask;
public TestServer(TestServerOptions options = null)
{
options = options ?? new TestServerOptions();
_connectionDispatcher = options.ConnectionDispatcher;
var transportOptions = new LinuxTransportOptions()
{
ThreadCount = options.ThreadCount,
DeferAccept = options.DeferAccept,
AioReceive = options.AioReceive,
AioSend = options.AioSend,
ApplicationSchedulingMode = options.ApplicationSchedulingMode,
};
var loggerFactory = new LoggerFactory();
EndPoint endPoint = null;
if (options.UnixSocketPath != null)
{
_unixSocketPath = options.UnixSocketPath;
endPoint = new UnixDomainSocketEndPoint(_unixSocketPath);
}
else
{
_serverAddress = options.IPEndPoint ?? new IPEndPoint(IPAddress.Loopback, 0);
endPoint = _serverAddress;
}
_transport = new Transport(endPoint, transportOptions, loggerFactory);
}
public TestServer(TestServerConnectionDispatcher connectionDispatcher) :
this(new TestServerOptions() { ConnectionDispatcher = connectionDispatcher })
{}
public async Task BindAsync()
{
await _transport.BindAsync();
// Make sure continuations don't need to post to xunit's MaxConcurrencySyncContext.
_acceptLoopTask = Task.Run(AcceptLoopAsync);
}
public async Task UnbindAsync()
{
await _transport.UnbindAsync();
await _acceptLoopTask;
}
public ValueTask StopAsync()
{
return _transport.DisposeAsync();
}
private async Task AcceptLoopAsync()
{
while (true)
{
var connection = await _transport.AcceptAsync();
if (connection == null)
{
break;
}
_ = OnConnection(connection);
}
}
private async Task OnConnection(ConnectionContext connection)
{
// Handle the connection
await _connectionDispatcher(connection.Transport.Input, connection.Transport.Output, connection);
// Wait for the transport to close
await CancellationTokenAsTask(connection.ConnectionClosed);
await connection.DisposeAsync();
}
private static Task CancellationTokenAsTask(CancellationToken token)
{
if (token.IsCancellationRequested)
{
return Task.CompletedTask;
}
// Transports already dispatch prior to tripping ConnectionClosed
// since application code can register to this token.
var tcs = new TaskCompletionSource<object>();
token.Register(state => ((TaskCompletionSource<object>)state).SetResult(null), tcs);
return tcs.Task;
}
public void Dispose()
{
ValueTask stopTask = _transport.DisposeAsync();
// Tests must have called StopAsync already.
Assert.True(stopTask.IsCompleted);
}
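// Echo implements the canonical PipeReader loop: read, copy every segment to
// the output, flush, then AdvanceTo(request.End) to mark everything consumed.
// The loop ends when the buffer is empty and the ReadResult is completed (EOF).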
public static async Task Echo(PipeReader input, PipeWriter output, ConnectionContext connection)
{
try
{
while (true)
{
var result = await input.ReadAsync();
var request = result.Buffer;
if (request.IsEmpty && result.IsCompleted)
{
input.AdvanceTo(request.End);
break;
}
foreach (var memory in request)
{
output.Write(memory.Span);
}
await output.FlushAsync();
input.AdvanceTo(request.End);
}
}
catch
{ }
finally
{
input.Complete();
output.Complete();
}
}
public Socket ConnectTo()
{
if (_unixSocketPath != null)
{
var client = Socket.Create(AF_UNIX, SOCK_STREAM, 0, blocking: true);
client.Connect(_unixSocketPath);
return client;
}
else if (_serverAddress != null)
{
var client = Socket.Create(AF_INET, SOCK_STREAM, IPPROTO_TCP, blocking: true);
client.SetSocketOption(SOL_TCP, TCP_NODELAY, 1);
client.Connect(_serverAddress);
return client;
}
else
{
return null;
}
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/IOInterop.cs<|end_filename|>
// Copyright 2017 <NAME> <<EMAIL>>
// This software is made available under the MIT License
// See COPYING for details
using System;
using System.Runtime.InteropServices;
using Tmds.Linux;
using static Tmds.Linux.LibC;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
static class IOInterop
{
public static PosixResult Close(int handle)
{
int rv = close(handle);
return PosixResult.FromReturnValue(rv);
}
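// Write and Read below use the standard EINTR idiom: a signal may interrupt
// the syscall before any data is transferred, in which case it is retried.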
public static unsafe PosixResult Write(SafeHandle handle, byte* buf, int count)
{
bool addedRef = false;
try
{
handle.DangerousAddRef(ref addedRef);
int fd = handle.DangerousGetHandle().ToInt32();
ssize_t rv;
do
{
rv = write(fd, buf, count);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
finally
{
if (addedRef)
{
handle.DangerousRelease();
}
}
}
public static unsafe PosixResult Read(SafeHandle handle, byte* buf, int count)
{
bool addedRef = false;
try
{
handle.DangerousAddRef(ref addedRef);
int fd = handle.DangerousGetHandle().ToInt32();
ssize_t rv;
do
{
rv = read(fd, buf, count);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
finally
{
if (addedRef)
{
handle.DangerousRelease();
}
}
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/IPEndPointStruct.cs<|end_filename|>
using System.Net;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
internal struct IPEndPointStruct
{
public IPEndPointStruct(IPAddress address, int port)
{
Address = address;
Port = port;
}
public IPAddress Address { get; set; }
public int Port { get; set; }
public System.Net.Sockets.AddressFamily AddressFamily => Address.AddressFamily;
public static implicit operator IPEndPointStruct(IPEndPoint endPoint)
=> new IPEndPointStruct(endPoint.Address, endPoint.Port);
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/TransportThread.TSocket.cs<|end_filename|>
using System;
using System.Buffers;
using System.IO.Pipelines;
using System.Net;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Tmds.Linux;
using static Tmds.Linux.LibC;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
sealed partial class TransportThread
{
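// Note: the low bits of SocketFlags deliberately alias the epoll event values
// (see the EPOLLIN/EPOLLOUT/EPOLLERR/EPOLLHUP/EPOLLONESHOT comments below),
// which lets PendingEventState hold epoll interest and socket state in a
// single field that RegisterFor can OR epoll events into.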
[Flags]
enum SocketFlags
{
None = 0,
AwaitReadable = 0x01, // EPOLLIN
AwaitWritable = 0x04, // EPOLLOUT
AwaitZeroCopy = 0x08, // EPOLLERR
EventControlRegistered = 0x10, // EPOLLHUP
EventControlPending = 1 << 30, // EPOLLONESHOT
CloseEnd = 0x20,
BothClosed = 0x40,
TypeAccept = 0x100,
TypeClient = 0x200,
TypePassFd = 0x300,
TypeMask = 0x300,
DeferAccept = 0x400,
WriteCanceled = 0x1000,
ReadCanceled = 0x2000,
DeferSend = 0x4000
}
class TSocket : TransportConnection
{
public struct ReceiveMemoryAllocation
{
public int FirstMemorySize;
public int IovLength;
}
private const int ZeroCopyNone = 0;
private const int ZeroCopyComplete = 1;
private const int ZeroCopyAwait = 2;
private const int MSG_ZEROCOPY = 0x4000000;
private const int CheckAvailable = -1;
private const int CheckAvailableIgnoreReceived = -2;
// 8 IOVectors, take up 128B of stack, can receive/send up to 32KB
public const int MaxIOVectorSendLength = 8;
public const int MaxIOVectorReceiveLength = 8;
private const int EventControlRegistered = (int)SocketFlags.EventControlRegistered;
public const int EventControlPending = (int)SocketFlags.EventControlPending;
// Copied from LibuvTransportOptions.MaxReadBufferSize
private const int PauseInputWriterThreshold = 1024 * 1024;
// Copied from LibuvTransportOptions.MaxWriteBufferSize
private const int PauseOutputWriterThreshold = 64 * 1024;
public readonly object Gate = new object();
private readonly ThreadContext _threadContext;
public readonly int Fd;
private readonly Action _onFlushedToApp;
private readonly Action _onReadFromApp;
private readonly MemoryHandle[] _sendMemoryHandles;
private readonly CancellationTokenSource _connectionClosedTokenSource;
private readonly TaskCompletionSource<object> _waitForConnectionClosedTcs;
public int ZeroCopyThreshold;
private SocketFlags _flags;
private ValueTaskAwaiter<ReadResult> _readAwaiter;
private ValueTaskAwaiter<FlushResult> _flushAwaiter;
private int _zeroCopyState;
private SequencePosition _zeroCopyEnd;
private int _readState = CheckAvailable;
public TSocket(ThreadContext threadContext, int fd, SocketFlags flags, LinuxTransportOptions options)
{
_threadContext = threadContext;
Fd = fd;
_flags = flags;
_onFlushedToApp = new Action(OnFlushedToApp);
_onReadFromApp = new Action(OnReadFromApp);
_connectionClosedTokenSource = new CancellationTokenSource();
ConnectionClosed = _connectionClosedTokenSource.Token;
_waitForConnectionClosedTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
if (!IsDeferSend)
{
_sendMemoryHandles = new MemoryHandle[MaxIOVectorSendLength];
}
var inputOptions = new PipeOptions(MemoryPool, options.ApplicationSchedulingMode, PipeScheduler.Inline, PauseInputWriterThreshold, PauseInputWriterThreshold / 2, useSynchronizationContext: false);
var outputOptions = new PipeOptions(MemoryPool, PipeScheduler.Inline, options.ApplicationSchedulingMode, PauseOutputWriterThreshold, PauseOutputWriterThreshold / 2, useSynchronizationContext: false);
var pair = DuplexPipe.CreateConnectionPair(inputOptions, outputOptions);
Transport = pair.Transport;
Application = pair.Application;
}
public PipeWriter Input => Application.Output;
public PipeReader Output => Application.Input;
public bool IsDeferAccept => HasFlag(SocketFlags.DeferAccept);
public bool IsDeferSend => HasFlag(SocketFlags.DeferSend);
public SocketFlags Type => (_flags & SocketFlags.TypeMask);
private int MaxBufferSize => MemoryPool.MaxBufferSize;
private int BufferMargin => MaxBufferSize / 4;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool HasFlag(SocketFlags flag) => HasFlag(_flags, flag);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool HasFlag(SocketFlags flags, SocketFlags flag) => (flags & flag) != 0;
// must be called under Gate
public int PendingEventState
{
get => (int)_flags;
set => _flags = (SocketFlags)value;
}
// TODO: Do something with the abortReason argument like logging it and complete the app input pipe with it.
public override void Abort(ConnectionAbortedException abortReason)
{
Output.CancelPendingRead();
CancelWriteToSocket();
}
public override async ValueTask DisposeAsync()
{
Transport.Input.Complete();
Transport.Output.Complete();
Abort();
await _waitForConnectionClosedTcs.Task;
_connectionClosedTokenSource.Dispose();
}
private void CancelWriteToSocket()
{
bool completeWritable = false;
lock (Gate)
{
var flags = _flags;
if (HasFlag(flags, SocketFlags.WriteCanceled))
{
return;
}
if (HasFlag(flags, SocketFlags.AwaitWritable))
{
completeWritable = true;
}
if (HasFlag(flags, SocketFlags.AwaitZeroCopy))
{
// Terminate pending zero copy
// Call it under Gate so it doesn't race with Close
SocketInterop.Disconnect(Fd);
}
flags &= ~SocketFlags.AwaitWritable;
flags |= SocketFlags.WriteCanceled;
_flags = flags;
}
if (completeWritable)
{
OnWritable(stopped: true);
}
}
private void CancelReadFromSocket()
{
bool completeReadable = false;
lock (Gate)
{
var flags = _flags;
if (HasFlag(flags, SocketFlags.ReadCanceled))
{
return;
}
if (HasFlag(flags, SocketFlags.AwaitReadable))
{
completeReadable = true;
}
flags &= ~SocketFlags.AwaitReadable;
flags |= SocketFlags.ReadCanceled;
_flags = flags;
}
if (completeReadable)
{
CompleteInput(new ConnectionAbortedException());
}
}
private void ReadFromApp()
{
bool deferSend = IsDeferSend;
bool loop = !deferSend;
do
{
_readAwaiter = Output.ReadAsync().GetAwaiter();
if (_readAwaiter.IsCompleted)
{
if (deferSend)
{
_threadContext.ScheduleSend(this);
}
else
{
loop = OnReadFromApp(loop, _sendMemoryHandles);
}
}
else
{
_readAwaiter.UnsafeOnCompleted(_onReadFromApp);
loop = false;
}
} while (loop);
}
private void OnReadFromApp()
{
if (IsDeferSend)
{
_threadContext.ScheduleSend(this);
}
else
{
OnReadFromApp(loop: false, _sendMemoryHandles);
}
}
public void DoDeferedSend(Span<MemoryHandle> memoryHandles)
{
OnReadFromApp(loop: false, memoryHandles);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public Exception GetReadResult(out ReadOnlySequence<byte> buffer)
{
try
{
ReadResult readResult = _readAwaiter.GetResult();
buffer = readResult.Buffer;
if ((buffer.IsEmpty && readResult.IsCompleted) || readResult.IsCanceled)
{
// EOF or TransportThread stopped
return TransportConstants.StopSentinel;
}
else
{
return null;
}
}
catch (Exception e)
{
buffer = default(ReadOnlySequence<byte>);
return e;
}
}
private unsafe bool OnReadFromApp(bool loop, Span<MemoryHandle> memoryHandles)
{
ReadOnlySequence<byte> buffer;
Exception error = GetReadResult(out buffer);
if (error != null)
{
if (error == TransportConstants.StopSentinel)
{
error = null;
}
CompleteOutput(error);
return false;
}
else
{
int ioVectorLength = CalcIOVectorLengthForSend(ref buffer, MaxIOVectorSendLength);
var ioVectors = stackalloc iovec[ioVectorLength];
FillSendIOVector(ref buffer, ioVectors, ioVectorLength, memoryHandles);
bool zerocopy = buffer.Length >= ZeroCopyThreshold;
(PosixResult result, bool zeroCopyRegistered) = TrySend(zerocopy, ioVectors, ioVectorLength);
for (int i = 0; i < ioVectorLength; i++)
{
memoryHandles[i].Dispose();
}
return HandleSendResult(ref buffer, result, loop, zerocopy, zeroCopyRegistered);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool HandleSendResult(ref ReadOnlySequence<byte> buffer, PosixResult result, bool loop, bool zerocopy, bool zeroCopyRegistered)
{
SequencePosition end;
if (result.Value == buffer.Length)
{
end = buffer.End;
}
else if (result.IsSuccess)
{
end = buffer.GetPosition(result.Value);
}
else if (result == PosixResult.EAGAIN)
{
Output.AdvanceTo(buffer.Start);
WaitSocketWritable();
return false;
}
else if (zerocopy && result == PosixResult.ENOBUFS)
{
// We reached the max locked memory (ulimit -l), disable zerocopy.
end = buffer.Start;
ZeroCopyThreshold = LinuxTransportOptions.NoZeroCopy;
}
else
{
CompleteOutput(result.AsException());
return false;
}
if (zerocopy && result.Value > 0)
{
_zeroCopyEnd = end;
return WaitZeroCopyComplete(loop, zeroCopyRegistered);
}
// We need to call Advance to end the read
Output.AdvanceTo(end);
if (!loop)
{
ReadFromApp();
}
return loop;
}
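// Zero-copy completion handshake: _zeroCopyState moves between ZeroCopyNone,
// ZeroCopyAwait and ZeroCopyComplete via Interlocked.CompareExchange. The
// sending side (below) and the EPOLLERR completion (OnZeroCopyCompleted) race;
// whichever observes the other's state change calls FinishZeroCopy, so the
// buffer is released to the pipe exactly once.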
private bool WaitZeroCopyComplete(bool loop, bool registered)
{
if (registered)
{
int previousState = Interlocked.CompareExchange(ref _zeroCopyState, ZeroCopyAwait, ZeroCopyNone);
if (previousState == ZeroCopyComplete)
{
// registered, complete
return FinishZeroCopy(loop);
}
else
{
// registered, not completed
return false;
}
}
else
{
// not registered
lock (Gate)
{
RegisterFor(EPOLLERR);
}
return false;
}
}
public void OnZeroCopyCompleted()
{
int previousState = Interlocked.CompareExchange(ref _zeroCopyState, ZeroCopyComplete, ZeroCopyNone);
if (previousState == ZeroCopyAwait)
{
FinishZeroCopy(loop: false);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool FinishZeroCopy(bool loop)
{
Volatile.Write(ref _zeroCopyState, ZeroCopyNone);
Output.AdvanceTo(_zeroCopyEnd);
_zeroCopyEnd = default(SequencePosition);
if (!loop)
{
ReadFromApp();
}
return loop;
}
public void CompleteOutput(Exception outputError)
{
Output.Complete(outputError);
CancelReadFromSocket();
CleanupSocketEnd();
}
private void WaitSocketWritable()
{
bool stopped = false;
lock (Gate)
{
stopped = HasFlag(SocketFlags.WriteCanceled);
if (!stopped)
{
RegisterFor(EPOLLOUT);
}
}
if (stopped)
{
OnWritable(true);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void OnWritable(bool stopped)
{
if (stopped)
{
CompleteOutput(null);
}
else
{
ReadFromApp();
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void RegisterFor(int ev)
{
// called under tsocket.Gate
var pendingEventState = PendingEventState;
bool registered = (pendingEventState & TSocket.EventControlRegistered) != 0;
pendingEventState |= TSocket.EventControlRegistered | ev;
PendingEventState = pendingEventState;
if ((pendingEventState & TSocket.EventControlPending) == 0)
{
_threadContext.UpdateEPollControl(this, pendingEventState, registered);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void CleanupSocketEnd()
{
lock (Gate)
{
// CloseEnd added twice equals BothClosed (0x20 + 0x20 == 0x40): the first
// half to close sets CloseEnd, the second close reaches BothClosed.
_flags = _flags + (int)SocketFlags.CloseEnd;
if (!HasFlag(SocketFlags.BothClosed))
{
return;
}
}
// First remove from the Dictionary, so we can't match with a new fd.
bool lastSocket = _threadContext.RemoveSocket(Fd);
// We are not using SafeHandles to increase performance.
// We get here when both reading and writing has stopped
// so we are sure this is the last use of the Socket.
Close();
// Inform the application.
ThreadPool.UnsafeQueueUserWorkItem(state => ((TSocket)state).CancelConnectionClosedToken(), this);
if (lastSocket)
{
_threadContext.StopThread();
}
}
private void CancelConnectionClosedToken()
{
_connectionClosedTokenSource.Cancel();
_waitForConnectionClosedTcs.SetResult(null);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe int FillReceiveIOVector(ReceiveMemoryAllocation memoryAllocation, iovec* ioVectors, Span<MemoryHandle> handles)
{
PipeWriter writer = Input;
int advanced = 0;
Memory<byte> memory = writer.GetMemory(memoryAllocation.FirstMemorySize);
int length = memory.Length;
for (int i = 0; i < memoryAllocation.IovLength; i++)
{
var bufferHandle = memory.Pin();
ioVectors[i].iov_base = bufferHandle.Pointer;
ioVectors[i].iov_len = length;
handles[i] = bufferHandle;
// Every Memory (except the last one) must be filled completely.
if (i != (memoryAllocation.IovLength - 1))
{
writer.Advance(length);
advanced += length;
memory = writer.GetMemory(MaxBufferSize);
length = MaxBufferSize;
}
}
return advanced;
}
public unsafe PosixResult Receive(Span<MemoryHandle> handles)
{
ReceiveMemoryAllocation memoryAllocation = DetermineMemoryAllocationForReceive(MaxIOVectorReceiveLength);
var ioVectors = stackalloc iovec[memoryAllocation.IovLength];
int advanced = FillReceiveIOVector(memoryAllocation, ioVectors, handles);
try
{
// Ideally we get availableBytes in a single receive
// but we are happy if we get at least a part of it
// and we are willing to retry up to MaxEAgainCount times on EAGAIN.
// Less data could be returned due to these reasons:
// * TCP URG
// * packet was not placed in receive queue (race with FIONREAD)
// * ?
var eAgainCount = 0;
var received = 0;
do
{
var result = SocketInterop.Receive(Fd, ioVectors, memoryAllocation.IovLength);
(bool done, PosixResult retval) = InterpretReceiveResult(result, ref received, advanced, ioVectors, memoryAllocation.IovLength);
if (done)
{
return retval;
}
else if (retval == PosixResult.EAGAIN)
{
eAgainCount++;
if (eAgainCount == TransportConstants.MaxEAgainCount)
{
return TransportConstants.TooManyEAgain;
}
}
else
{
eAgainCount = 0;
}
} while (true);
}
finally
{
for (int i = 0; i < memoryAllocation.IovLength; i++)
{
handles[i].Dispose();
}
}
}
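// InterpretReceiveResult balances two counters: 'advanced' is what was already
// committed to the PipeWriter while filling the IOVs, 'received' is what the
// socket actually returned so far. Once received >= advanced, the writer is
// advanced by the difference and the read completes; otherwise the IOVs are
// shifted past the received bytes and the receive is retried.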
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe (bool done, PosixResult receiveResult) InterpretReceiveResult(PosixResult result, ref int received, int advanced, iovec* ioVectors, int ioVectorLength)
{
PipeWriter writer = Input;
if (result.IsSuccess)
{
received += result.IntValue;
if (received >= advanced)
{
// We made it!
int finalAdvance = received - advanced;
int spaceRemaining = (int)(ioVectors[ioVectorLength - 1].iov_len) - finalAdvance;
if (spaceRemaining == 0)
{
// We used up all room, assume there is a remainder to be read.
_readState = CheckAvailableIgnoreReceived;
}
else
{
if (_readState == CheckAvailableIgnoreReceived)
{
// We've read the remainder.
_readState = CheckAvailable;
}
else
{
_readState = received;
}
}
writer.Advance(finalAdvance);
return (true, new PosixResult(received == 0 ? 0 : 1));
}
// Update ioVectors to match bytes read
var skip = (size_t)result.Value;
for (int i = 0; (i < ioVectorLength) && (skip > 0); i++)
{
var length = ioVectors[i].iov_len;
var skipped = skip < length ? skip : length;
ioVectors[i].iov_len = length - skipped;
ioVectors[i].iov_base = (byte*)ioVectors[i].iov_base + skipped;
skip -= skipped;
}
return (false, new PosixResult(1));
}
else if (result == PosixResult.EAGAIN)
{
return (advanced == 0, result);
}
else if (result == PosixResult.ECONNRESET)
{
return (true, result);
}
else
{
return (true, result);
}
}
private void ReceiveFromSocket()
{
bool stopped = false;
lock (Gate)
{
stopped = HasFlag(SocketFlags.ReadCanceled);
if (!stopped)
{
RegisterFor(EPOLLIN);
}
}
if (stopped)
{
CompleteInput(new ConnectionAbortedException());
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void OnReceiveFromSocket(PosixResult result)
{
if (result.Value == 0)
{
// EOF
CompleteInput(null);
}
else if (result.IsSuccess)
{
// Data received
FlushToApp();
}
else if (result == PosixResult.EAGAIN)
{
// EAGAIN
ReceiveFromSocket();
}
else
{
// Error
Exception error;
if (result == PosixResult.ECONNRESET)
{
error = new ConnectionResetException(result.ErrorDescription(), result.AsException());
}
else if (result == TransportConstants.TooManyEAgain)
{
error = new NotSupportedException("Too many EAGAIN, unable to receive available bytes.");
}
else
{
error = result.AsException();
}
CompleteInput(error);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void FlushToApp()
{
_flushAwaiter = Input.FlushAsync().GetAwaiter();
if (_flushAwaiter.IsCompleted)
{
OnFlushedToApp();
}
else
{
_flushAwaiter.UnsafeOnCompleted(_onFlushedToApp);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void OnFlushedToApp()
{
Exception error = null;
try
{
FlushResult flushResult = _flushAwaiter.GetResult();
if (flushResult.IsCompleted || // Reader has stopped
flushResult.IsCanceled) // TransportThread has stopped
{
error = TransportConstants.StopSentinel;
}
}
catch (Exception e)
{
error = e;
}
if (error == null)
{
ReceiveFromSocket();
}
else
{
if (error == TransportConstants.StopSentinel)
{
error = null;
}
CompleteInput(error);
}
}
private void CompleteInput(Exception error)
{
Input.Complete(error);
CleanupSocketEnd();
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int CalcIOVectorLengthForSend(ref ReadOnlySequence<byte> buffer, int maxIOVectorSendLength)
{
int ioVectorLength = 0;
foreach (var memory in buffer)
{
if (memory.Length == 0)
{
continue;
}
ioVectorLength++;
if (ioVectorLength == maxIOVectorSendLength)
{
// No more room in the IOVector
break;
}
}
return ioVectorLength;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe void FillSendIOVector(ref ReadOnlySequence<byte> buffer, iovec* ioVectors, int ioVectorLength, Span<MemoryHandle> memoryHandles)
{
int i = 0;
foreach (var memory in buffer)
{
if (memory.Length == 0)
{
continue;
}
var bufferHandle = memory.Pin();
ioVectors[i].iov_base = bufferHandle.Pointer;
ioVectors[i].iov_len = memory.Length;
memoryHandles[i] = bufferHandle;
i++;
if (i == ioVectorLength)
{
// No more room in the IOVector
break;
}
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private unsafe (PosixResult, bool zerocopyRegistered) TrySend(bool zerocopy, iovec* ioVectors, int ioVectorLength)
{
bool zeroCopyRegistered = false;
if (zerocopy)
{
lock (Gate)
{
// Don't start new zerocopies when writing stopped.
if (HasFlag(SocketFlags.WriteCanceled))
{
return (new PosixResult(PosixResult.ECONNABORTED), zeroCopyRegistered);
}
// If we have a pending Readable event, it will report on the zero-copy completion too.
if ((PendingEventState & EPOLLIN) != 0)
{
PendingEventState |= EPOLLERR;
zeroCopyRegistered = true;
}
}
}
PosixResult rv = SocketInterop.Send(Fd, ioVectors, ioVectorLength, zerocopy ? MSG_ZEROCOPY : 0);
if (zerocopy && rv.Value <= 0 && zeroCopyRegistered)
{
lock (Gate)
{
PendingEventState &= ~EPOLLERR;
}
zeroCopyRegistered = false;
}
return (rv, zeroCopyRegistered);
}
public void Start(bool dataMayBeAvailable)
{
ReadFromApp();
// TODO: implement dataMayBeAvailable
ReceiveFromSocket();
}
public override MemoryPool<byte> MemoryPool => _threadContext.MemoryPool;
public PosixResult TryReceiveSocket(out int socket, bool blocking)
=> SocketInterop.ReceiveSocket(Fd, out socket, blocking);
public unsafe PosixResult TryAccept(out int socket, bool blocking)
=> SocketInterop.Accept(Fd, blocking, out socket);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public ReceiveMemoryAllocation DetermineMemoryAllocationForReceive(int maxIovLength)
{
// In this function we try to avoid the 'GetAvailableBytes' system call.
// If we read a small amount previously, we assume a single buffer is enough.
int reserve = 0;
int state = _readState;
if (state > 0) // state is amount of bytes read previously
{
// Make a guess based on what we read previously.
if (state + BufferMargin <= MaxBufferSize)
{
reserve = state + BufferMargin;
}
else if (state <= MaxBufferSize)
{
reserve = MaxBufferSize;
}
}
if (reserve == 0)
{
// We didn't make a guess, get the available bytes.
reserve = GetAvailableBytes();
if (reserve == 0)
{
reserve = MaxBufferSize / 2;
}
}
// We must be able to completely fill the first (IovLength - 1) IOVs.
// So when we are only guessing, we return a single IOV.
// When we used GetAvailableBytes, we may return at most 1 IOV more than strictly needed.
if (reserve <= MaxBufferSize)
{
return new ReceiveMemoryAllocation { FirstMemorySize = reserve, IovLength = 1 };
}
else
{
Memory<byte> memory = Input.GetMemory(MaxBufferSize / 2);
int firstMemory = memory.Length;
int iovLength = Math.Min(1 + (reserve - memory.Length + MaxBufferSize - 1) / MaxBufferSize, maxIovLength);
return new ReceiveMemoryAllocation { FirstMemorySize = firstMemory, IovLength = iovLength };
}
}
public int GetAvailableBytes()
{
PosixResult result = SocketInterop.GetAvailableBytes(Fd);
result.ThrowOnError();
return result.IntValue;
}
public void SetSocketOption(int level, int optname, int value)
{
TrySetSocketOption(level, optname, value)
.ThrowOnError();
}
public unsafe PosixResult TrySetSocketOption(int level, int optname, int value)
{
return SocketInterop.SetSockOpt(Fd, level, optname, (byte*)&value, 4);
}
public unsafe PosixResult TryBind(IPEndPointStruct endpoint)
{
sockaddr_storage addr;
Socket.GetSockaddrInet(endpoint, &addr, out int length);
int rv = bind(Fd, (sockaddr*)&addr, length);
return PosixResult.FromReturnValue(rv);
}
public void Listen(int backlog) => PosixResult.FromReturnValue(listen(Fd, backlog)).ThrowOnError();
public void Close() => IOInterop.Close(Fd);
public IPEndPointStruct GetLocalIPAddress(IPAddress reuseAddress = null)
{
IPEndPointStruct ep;
TryGetLocalIPAddress(out ep, reuseAddress)
.ThrowOnError();
return ep;
}
public unsafe PosixResult TryGetLocalIPAddress(out IPEndPointStruct ep, IPAddress reuseAddress = null)
=> SocketInterop.TryGetLocalIPAddress(Fd, out ep, reuseAddress);
public unsafe PosixResult TryGetPeerIPAddress(out IPEndPointStruct ep)
=> SocketInterop.TryGetPeerIPAddress(Fd, out ep);
internal class DuplexPipe : IDuplexPipe
{
public DuplexPipe(PipeReader reader, PipeWriter writer)
{
Input = reader;
Output = writer;
}
public PipeReader Input { get; }
public PipeWriter Output { get; }
public static DuplexPipePair CreateConnectionPair(PipeOptions inputOptions, PipeOptions outputOptions)
{
var input = new Pipe(inputOptions);
var output = new Pipe(outputOptions);
var transportToApplication = new DuplexPipe(output.Reader, input.Writer);
var applicationToTransport = new DuplexPipe(input.Reader, output.Writer);
return new DuplexPipePair(applicationToTransport, transportToApplication);
}
// This struct exists to work around issues with value tuples on .NET Framework
public readonly struct DuplexPipePair
{
public IDuplexPipe Transport { get; }
public IDuplexPipe Application { get; }
public DuplexPipePair(IDuplexPipe transport, IDuplexPipe application)
{
Transport = transport;
Application = application;
}
}
}
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/PosixResult.cs<|end_filename|>
// Copyright 2017 <NAME> <<EMAIL>>
// This software is made available under the MIT License
// See COPYING for details
using System;
using System.IO;
using System.Collections.Generic;
using Microsoft.AspNetCore.Connections;
using Tmds.Linux;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
internal partial struct PosixResult
{
private ssize_t _value;
public ssize_t Value => _value;
public int IntValue => (int)_value;
public PosixResult(ssize_t value)
{
_value = value;
}
public bool IsSuccess
{
get
{
return _value >= 0;
}
}
public static PosixResult FromReturnValue(ssize_t rv)
{
return rv < 0 ? new PosixResult(-Tmds.Linux.LibC.errno) : new PosixResult(rv);
}
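// Example: a failed write(2) sets errno to EAGAIN and returns -1, which maps
// to PosixResult(-EAGAIN); callers can then test 'result == PosixResult.EAGAIN'
// because the constants in PosixResult.Values.cs are negated to match.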
internal string ErrorDescription()
{
if (_value >= 0)
{
return string.Empty;
}
else
{
lock (s_errnoDescriptions)
{
int errno = (int)-_value;
string description;
if (s_errnoDescriptions.TryGetValue(errno, out description))
{
return description;
}
description = ErrorInterop.StrError(errno);
s_errnoDescriptions.Add(errno, description);
return description;
}
}
}
private static Dictionary<int, string> s_errnoDescriptions = new Dictionary<int, string>();
public Exception AsException()
{
if (IsSuccess)
{
throw new InvalidOperationException($"{nameof(PosixResult)} is not an error.");
}
return new IOException(ErrorDescription(), (int)-_value);
}
public void ThrowOnError()
{
if (!IsSuccess)
{
ThrowException();
}
}
private void ThrowException()
{
throw AsException();
}
public static implicit operator bool(PosixResult result)
{
return result.IsSuccess;
}
public override bool Equals(object obj)
{
var other = obj as PosixResult?;
if (other == null)
{
return false;
}
return _value == other.Value._value;
}
public override int GetHashCode()
{
return _value.GetHashCode();
}
public override string ToString()
{
if (IsSuccess)
{
return _value.ToString();
}
else
{
return ErrorDescription();
}
}
public static bool operator==(PosixResult lhs, int nativeValue)
{
return lhs._value == nativeValue;
}
public static bool operator!=(PosixResult lhs, int nativeValue)
{
return lhs._value != nativeValue;
}
public static bool operator==(PosixResult lhs, PosixResult rhs)
{
return lhs._value == rhs._value;
}
public static bool operator!=(PosixResult lhs, PosixResult rhs)
{
return lhs._value != rhs._value;
}
}
}
<|start_filename|>samples/BenchmarkApp/Program.cs<|end_filename|>
using System;
using System.IO;
using System.Net;
using System.Runtime;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Console;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal;
using Benchmarks.Middleware;
using RedHat.AspNetCore.Server.Kestrel.Transport.Linux;
using System.Linq;
namespace SampleApp
{
public class Startup
{
public Startup()
{ }
public void Configure(IApplicationBuilder app, ILoggerFactory loggerFactory)
{
app.UsePlainText();
app.UseJson();
app.Run(async context =>
{
var response = $"hello, world{Environment.NewLine}";
context.Response.ContentLength = response.Length;
context.Response.ContentType = "text/plain";
await context.Response.WriteAsync(response);
});
}
public static void Main(string[] args)
{
var hostBuilder = new WebHostBuilder()
.UseKestrel(options =>
{
options.AllowSynchronousIO = true;
})
.UseLinuxTransport()
.UseStartup<Startup>();
var host = hostBuilder.Build();
host.Run();
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/TransportThreadState.cs<|end_filename|>
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
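// States advance monotonically (TransportThread relies on this ordering for
// comparisons like '_state > ClosingAccept'):
//   Initial -> Starting -> Started -> ClosingAccept -> AcceptClosed -> Stopping -> Stopped
// Initial may also jump straight to Stopped when Unbind/Stop precedes Bind.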
enum TransportThreadState
{
Initial,
Starting,
Started,
ClosingAccept,
AcceptClosed,
Stopping,
Stopped
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/Socket.cs<|end_filename|>
using System;
using System.Net;
using System.Runtime.InteropServices;
using System.Text;
using Tmds.Linux;
using static Tmds.Linux.LibC;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
struct SocketPair
{
public int Socket1;
public int Socket2;
public void Dispose()
{
if (Socket1 != -1)
{
IOInterop.Close(Socket1);
Socket1 = -1;
}
if (Socket2 != -1)
{
IOInterop.Close(Socket2);
Socket2 = -1;
}
}
}
static class SocketInterop
{
public static unsafe PosixResult Socket(int domain, int type, int protocol, bool blocking, out int s)
{
type |= SOCK_CLOEXEC;
if (!blocking)
{
type |= SOCK_NONBLOCK;
}
s = socket(domain, type, protocol);
return PosixResult.FromReturnValue(s);
}
public static PosixResult Socket(int domain, int type, int protocol, bool blocking, out Socket socket)
{
int socketFd;
PosixResult result = Socket(domain, type, protocol, blocking, out socketFd);
socket = result.IsSuccess ? new Socket(socketFd) : null;
return result;
}
public static unsafe PosixResult GetAvailableBytes(int socket)
{
int availableBytes;
int rv = ioctl(socket, FIONREAD, &availableBytes);
return PosixResult.FromReturnValue(rv == -1 ? rv : availableBytes);
}
public static PosixResult GetAvailableBytes(Socket socket)
=> GetAvailableBytes(socket.DangerousGetHandle().ToInt32());
public static unsafe PosixResult Accept(int socket, bool blocking, out int clientSocket)
{
int flags = SOCK_CLOEXEC;
if (!blocking)
{
flags |= SOCK_NONBLOCK;
}
int rv;
do
{
rv = accept4(socket, null, null, flags);
} while (rv < 0 && errno == EINTR);
clientSocket = rv;
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult Accept(Socket socket, bool blocking, out Socket clientSocket)
{
int clientSocketFd;
PosixResult result = Accept(socket.DangerousGetHandle().ToInt32(), blocking, out clientSocketFd);
clientSocket = result.IsSuccess ? new Socket(clientSocketFd) : null;
return result;
}
public static PosixResult Shutdown(Socket socket, int how)
=> Shutdown(socket.DangerousGetHandle().ToInt32(), how);
public static PosixResult Shutdown(int socket, int how)
{
int rv = shutdown(socket, how);
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult Send(int socket, iovec* ioVectors, int ioVectorLen, int flags = 0)
{
msghdr hdr = default(msghdr);
hdr.msg_iov = ioVectors;
hdr.msg_iovlen = ioVectorLen;
flags |= MSG_NOSIGNAL;
ssize_t rv;
do
{
rv = sendmsg(socket, &hdr, flags);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult Send(SafeHandle socket, iovec* ioVectors, int ioVectorLen, int flags = 0)
=> Send(socket.DangerousGetHandle().ToInt32(), ioVectors, ioVectorLen, flags);
public static unsafe PosixResult Receive(int socket, iovec* ioVectors, int ioVectorLen)
{
msghdr hdr = default(msghdr);
hdr.msg_iov = ioVectors;
hdr.msg_iovlen = ioVectorLen;
int flags = MSG_NOSIGNAL;
ssize_t rv;
do
{
rv = recvmsg(socket, &hdr, flags);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult Receive(SafeHandle socket, iovec* ioVectors, int ioVectorLen)
=> Receive(socket.DangerousGetHandle().ToInt32(), ioVectors, ioVectorLen);
public static unsafe PosixResult SetSockOpt(int socket, int level, int optname, void* optval, socklen_t optlen)
{
int rv = setsockopt(socket, level, optname, optval, optlen);
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult SetSockOpt(Socket socket, int level, int optname, void* optval, socklen_t optlen)
=> SetSockOpt(socket.DangerousGetHandle().ToInt32(), level, optname, optval, optlen);
public static unsafe PosixResult GetSockOpt(SafeHandle socket, int level, int optname, void* optval, socklen_t* optlen)
{
int rv = getsockopt(socket.DangerousGetHandle().ToInt32(), level, optname, optval, optlen);
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult GetPeerName(int socket, sockaddr_storage* addr)
{
socklen_t sockLen = SizeOf.sockaddr_storage;
int rv = getpeername(socket, (sockaddr*)addr, &sockLen);
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult GetPeerName(Socket socket, sockaddr_storage* addr)
=> GetPeerName(socket.DangerousGetHandle().ToInt32(), addr);
public static unsafe PosixResult GetSockName(int socket, sockaddr_storage* addr)
{
socklen_t sockLen = SizeOf.sockaddr_storage;
int rv = getsockname(socket, (sockaddr*)addr, &sockLen);
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult GetSockName(Socket socket, sockaddr_storage* addr)
=> GetSockName(socket.DangerousGetHandle().ToInt32(), addr);
public static unsafe PosixResult SocketPair(int domain, int type, int protocol, bool blocking, out int socket1, out int socket2)
{
int* sv = stackalloc int[2];
type |= SOCK_CLOEXEC;
if (!blocking)
{
type |= SOCK_NONBLOCK;
}
int rv = socketpair(domain, type, protocol, sv);
if (rv == 0)
{
socket1 = sv[0];
socket2 = sv[1];
}
else
{
socket1 = -1;
socket2 = -1;
}
return PosixResult.FromReturnValue(rv);
}
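// Receives a socket descriptor sent over a Unix socket. The descriptor travels in an
// SCM_RIGHTS control message; the one-byte iovec exists only because sendmsg/recvmsg
// require some regular payload alongside the ancillary data.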
public unsafe static PosixResult ReceiveSocket(int fromSocket, out int socket, bool blocking)
{
socket = -1;
byte dummyBuffer = 0;
iovec iov = default(iovec);
iov.iov_base = &dummyBuffer;
iov.iov_len = 1;
int controlLength = CMSG_SPACE(sizeof(int));
byte* control = stackalloc byte[controlLength];
msghdr header = default(msghdr);
header.msg_iov = &iov;
header.msg_iovlen = 1;
header.msg_control = control;
header.msg_controllen = controlLength;
int flags = MSG_NOSIGNAL | MSG_CMSG_CLOEXEC;
ssize_t rv;
do
{
rv = recvmsg(fromSocket, &header, flags);
} while (rv < 0 && errno == EINTR);
if (rv != -1)
{
for (cmsghdr* cmsg = CMSG_FIRSTHDR(&header); cmsg != null; cmsg = CMSG_NXTHDR(&header,cmsg))
{
if (cmsg->cmsg_level == SOL_SOCKET && cmsg->cmsg_type == SCM_RIGHTS)
{
int* fdptr = (int*)CMSG_DATA(cmsg);
socket = *fdptr;
flags = fcntl(socket, F_GETFL, 0);
if (blocking)
{
flags &= ~O_NONBLOCK;
}
else
{
flags |= O_NONBLOCK;
}
fcntl(socket, F_SETFL, flags);
break;
}
}
}
return PosixResult.FromReturnValue(rv);
}
public static PosixResult ReceiveSocket(Socket fromSocket, out Socket socket, bool blocking)
{
int receiveSocketFd;
PosixResult result = ReceiveSocket(fromSocket.DangerousGetHandle().ToInt32(), out receiveSocketFd, blocking);
socket = result.IsSuccess ? new Socket(receiveSocketFd) : null;
return result;
}
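// Accepts a connection on fromSocket and forwards the accepted descriptor to toSocket
// in an SCM_RIGHTS control message, then closes the local copy of the descriptor
// (the kernel duplicates it for the receiving end).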
public unsafe static PosixResult AcceptAndSendHandleTo(Socket fromSocket, int toSocket)
{
int acceptFd = fromSocket.DangerousGetHandle().ToInt32();
ssize_t rv;
do
{
rv = accept4(acceptFd, null, null, SOCK_CLOEXEC);
} while (rv < 0 && errno == EINTR);
if (rv != -1)
{
int acceptedFd = (int)rv;
byte dummyBuffer = 0;
iovec iov = default(iovec);
iov.iov_base = &dummyBuffer;
iov.iov_len = 1;
int controlLength = CMSG_SPACE(sizeof(int));
byte* control = stackalloc byte[controlLength];
msghdr header = default(msghdr);
header.msg_iov = &iov;
header.msg_iovlen = 1;
header.msg_control = control;
header.msg_controllen = controlLength;
cmsghdr* cmsg = CMSG_FIRSTHDR(&header);
cmsg->cmsg_level = SOL_SOCKET;
cmsg->cmsg_type = SCM_RIGHTS;
cmsg->cmsg_len = CMSG_LEN(sizeof(int));
int *fdptr = (int*)CMSG_DATA(cmsg);
*fdptr = acceptedFd;
do
{
rv = sendmsg(toSocket, &header, MSG_NOSIGNAL);
} while (rv < 0 && errno == EINTR);
IOInterop.Close(acceptedFd);
}
return PosixResult.FromReturnValue(rv);
}
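// Reads a MSG_ZEROCOPY completion notification from the socket error queue
// (MSG_ERRQUEUE). The kernel reports whether the pages were actually sent zero-copy
// or were copied after all (SO_EE_CODE_ZEROCOPY_COPIED).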
public unsafe static PosixResult CompleteZeroCopy(int socket)
{
msghdr msg = default(msghdr);
int controlLength = 100;
byte* control = stackalloc byte[controlLength];
do
{
msg.msg_control = control;
msg.msg_controllen = controlLength;
ssize_t rv;
do
{
rv = recvmsg(socket, &msg, MSG_NOSIGNAL| MSG_ERRQUEUE);
} while (rv < 0 && errno == EINTR);
if (rv == -1)
{
return PosixResult.FromReturnValue(rv);
}
cmsghdr* cm = CMSG_FIRSTHDR(&msg);
if (cm == null)
{
continue;
}
if (!((cm->cmsg_level == SOL_IP && cm->cmsg_type == IP_RECVERR) ||
(cm->cmsg_level == SOL_IPV6 && cm->cmsg_type == IPV6_RECVERR)))
{
continue;
}
sock_extended_err *serr = (sock_extended_err*)CMSG_DATA(cm);
if ((serr->ee_origin != SO_EE_ORIGIN_ZEROCOPY) ||
(serr->ee_errno != 0))
{
continue;
}
return new PosixResult(((serr->ee_code & SO_EE_CODE_ZEROCOPY_COPIED) != 0) ?
ZeroCopyCopied : ZeroCopySuccess);
} while (true);
}
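// Disconnects the socket by calling connect() with an AF_UNSPEC address, which
// dissolves the existing association.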
public static unsafe PosixResult Disconnect(int socket)
{
sockaddr addr = default(sockaddr);
addr.sa_family = AF_UNSPEC;
int rv;
do
{
rv = connect(socket, &addr, SizeOf.sockaddr);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
public const int ZeroCopyCopied = 0;
public const int ZeroCopySuccess = 1;
public static unsafe PosixResult TryGetLocalIPAddress(Socket socket, out IPEndPointStruct ep, IPAddress reuseAddress = null)
=> TryGetLocalIPAddress(socket.DangerousGetHandle().ToInt32(), out ep, reuseAddress);
public static unsafe PosixResult TryGetLocalIPAddress(int socket, out IPEndPointStruct ep, IPAddress reuseAddress = null)
{
sockaddr_storage socketAddress;
var rv = SocketInterop.GetSockName(socket, &socketAddress);
if (rv.IsSuccess)
{
if (!ToIPEndPointStruct(&socketAddress, out ep, reuseAddress))
{
return new PosixResult(PosixResult.EINVAL);
}
}
else
{
ep = default(IPEndPointStruct);
}
return rv;
}
public static unsafe PosixResult TryGetPeerIPAddress(Socket socket, out IPEndPointStruct ep, IPAddress reuseAddress = null)
=> TryGetPeerIPAddress(socket.DangerousGetHandle().ToInt32(), out ep, reuseAddress);
public static unsafe PosixResult TryGetPeerIPAddress(int socket, out IPEndPointStruct ep, IPAddress reuseAddress = null)
{
sockaddr_storage socketAddress;
var rv = SocketInterop.GetPeerName(socket, &socketAddress);
if (rv.IsSuccess)
{
if (!ToIPEndPointStruct(&socketAddress, out ep, reuseAddress))
{
return new PosixResult(PosixResult.EINVAL);
}
}
else
{
ep = default(IPEndPointStruct);
}
return rv;
}
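// Converts a native sockaddr_storage to an IPEndPointStruct. For IPv4, when the address
// equals the caller-supplied reuseAddress, that instance is reused to avoid an allocation.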
private static unsafe bool ToIPEndPointStruct(sockaddr_storage* addr, out IPEndPointStruct ep, IPAddress reuseAddress = null)
{
if (addr->ss_family == AF_INET)
{
sockaddr_in* addrIn = (sockaddr_in*)addr;
long value = ((addrIn->sin_addr.s_addr[3] << 24 | addrIn->sin_addr.s_addr[2] << 16 | addrIn->sin_addr.s_addr[1] << 8 | addrIn->sin_addr.s_addr[0]) & 0x0FFFFFFFF);
#pragma warning disable CS0618 // 'IPAddress.Address' is obsolete
bool matchesReuseAddress = reuseAddress != null && reuseAddress.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork && reuseAddress.Address == value;
#pragma warning restore CS0618
int port = ntohs(addrIn->sin_port);
ep = new IPEndPointStruct(matchesReuseAddress ? reuseAddress : new IPAddress(value), port);
return true;
}
else if (addr->ss_family == AF_INET6)
{
sockaddr_in6* addrIn = (sockaddr_in6*)addr;
// We can't check if we can use reuseAddress without allocating.
const int length = 16;
var bytes = new byte[length];
for (int i = 0; i < length; i++)
{
bytes[i] = addrIn->sin6_addr.s6_addr[i];
}
int port = ntohs(addrIn->sin6_port);
ep = new IPEndPointStruct(new IPAddress(bytes, addrIn->sin6_scope_id), port);
return true;
}
else
{
ep = default(IPEndPointStruct);
return false;
}
}
}
// Warning: Some operations use DangerousGetHandle for increased performance
unsafe class Socket : CloseSafeHandle
{
private Socket()
{}
public Socket(int handle) :
base(handle)
{}
public static Socket Create(int domain, int type, int protocol, bool blocking)
{
Socket socket;
var result = SocketInterop.Socket(domain, type, protocol, blocking, out socket);
result.ThrowOnError();
return socket;
}
public int GetAvailableBytes()
{
var result = TryGetAvailableBytes();
result.ThrowOnError();
return result.IntValue;
}
public PosixResult TryGetAvailableBytes()
{
return SocketInterop.GetAvailableBytes(this);
}
public void Bind(string unixPath)
{
TryBind(unixPath)
.ThrowOnError();
}
public unsafe PosixResult TryBind(string unixPath)
{
sockaddr_un addr;
GetSockaddrUn(unixPath, out addr);
int rv = bind(DangerousGetHandle().ToInt32(), (sockaddr*)&addr, SizeOf.sockaddr_un);
return PosixResult.FromReturnValue(rv);
}
public void Bind(IPEndPointStruct endpoint)
{
TryBind(endpoint)
.ThrowOnError();
}
public unsafe PosixResult TryBind(IPEndPointStruct endpoint)
{
sockaddr_storage addr;
GetSockaddrInet(endpoint, &addr, out int length);
int rv = bind(DangerousGetHandle().ToInt32(), (sockaddr*)&addr, length);
return PosixResult.FromReturnValue(rv);
}
public void Connect(IPEndPointStruct endpoint)
{
TryConnect(endpoint)
.ThrowOnError();
}
public unsafe PosixResult TryConnect(IPEndPointStruct endpoint)
{
sockaddr_storage addr;
GetSockaddrInet(endpoint, &addr, out int length);
int rv;
do
{
rv = connect(DangerousGetHandle().ToInt32(), (sockaddr*)&addr, length);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
public void Connect(string unixPath)
{
TryConnect(unixPath)
.ThrowOnError();
}
public unsafe PosixResult TryConnect(string unixPath)
{
sockaddr_un addr;
GetSockaddrUn(unixPath, out addr);
int rv;
do
{
rv = connect(DangerousGetHandle().ToInt32(), (sockaddr*)&addr, SizeOf.sockaddr_un);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
private static void GetSockaddrUn(string unixPath, out sockaddr_un addr)
{
addr = default(sockaddr_un);
addr.sun_family = AF_UNIX;
var bytes = Encoding.UTF8.GetBytes(unixPath);
int length = Math.Min(bytes.Length, sockaddr_un.sun_path_length - 1);
fixed (byte* pathBytes = bytes)
{
for (int i = 0; i < length; i++)
{
addr.sun_path[i] = pathBytes[i];
}
}
addr.sun_path[length] = 0;
}
internal static unsafe void GetSockaddrInet(IPEndPointStruct inetAddress, sockaddr_storage* addr, out int length)
{
if (inetAddress.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
{
sockaddr_in* addrIn = (sockaddr_in*)addr;
addrIn->sin_family = AF_INET;
addrIn->sin_port = htons((ushort)inetAddress.Port);
int bytesWritten;
inetAddress.Address.TryWriteBytes(new Span<byte>(addrIn->sin_addr.s_addr, 4), out bytesWritten);
length = SizeOf.sockaddr_in;
}
else if (inetAddress.AddressFamily == System.Net.Sockets.AddressFamily.InterNetworkV6)
{
sockaddr_in6* addrIn = (sockaddr_in6*)addr;
addrIn->sin6_family = AF_INET6;
addrIn->sin6_port = htons((ushort)inetAddress.Port);
addrIn->sin6_flowinfo = 0;
addrIn->sin6_scope_id = 0;
int bytesWritten;
inetAddress.Address.TryWriteBytes(new Span<byte>(addrIn->sin6_addr.s6_addr, 16), out bytesWritten);
length = SizeOf.sockaddr_in6;
}
else
{
length = 0;
}
}
public void Listen(int backlog)
{
TryListen(backlog)
.ThrowOnError();
}
public PosixResult TryListen(int backlog)
{
int rv = listen(DangerousGetHandle().ToInt32(), backlog);
return PosixResult.FromReturnValue(rv);
}
public unsafe Socket Accept(bool blocking)
{
Socket clientSocket;
var result = TryAccept(out clientSocket, blocking);
result.ThrowOnError();
return clientSocket;
}
public unsafe PosixResult TryAccept(out Socket clientSocket, bool blocking)
{
return SocketInterop.Accept(this, blocking, out clientSocket);
}
public int Receive(ArraySegment<byte> buffer)
{
var result = TryReceive(buffer);
result.ThrowOnError();
return result.IntValue;
}
public unsafe PosixResult TryReceive(ArraySegment<byte> buffer)
{
ValidateSegment(buffer);
fixed (byte* buf = buffer.Array)
{
iovec ioVector = new iovec() { iov_base = buf + buffer.Offset, iov_len = buffer.Count };
return SocketInterop.Receive(this, &ioVector, 1);
}
}
public unsafe ssize_t Receive(iovec* ioVectors, int ioVectorLen)
{
var result = TryReceive(ioVectors, ioVectorLen);
result.ThrowOnError();
return result.Value;
}
public unsafe PosixResult TryReceive(iovec* ioVectors, int ioVectorLen)
{
return SocketInterop.Receive(this, ioVectors, ioVectorLen);
}
public void Shutdown(int shutdown)
{
TryShutdown(shutdown)
.ThrowOnError();
}
public PosixResult TryShutdown(int shutdown)
{
return SocketInterop.Shutdown(this, shutdown);
}
public int Send(ArraySegment<byte> buffer)
{
var result = TrySend(buffer);
result.ThrowOnError();
return result.IntValue;
}
public unsafe PosixResult TrySend(ArraySegment<byte> buffer)
{
ValidateSegment(buffer);
fixed (byte* buf = buffer.Array)
{
iovec ioVector = new iovec() { iov_base = buf + buffer.Offset, iov_len = buffer.Count };
return SocketInterop.Send(this, &ioVector, 1);
}
}
public unsafe ssize_t Send(iovec* ioVectors, int ioVectorLen)
{
var result = TrySend(ioVectors, ioVectorLen);
result.ThrowOnError();
return result.Value;
}
public unsafe PosixResult TrySend(iovec* ioVectors, int ioVectorLen)
{
return SocketInterop.Send(this, ioVectors, ioVectorLen);
}
public void SetSocketOption(int level, int optname, int value)
{
TrySetSocketOption(level, optname, value)
.ThrowOnError();
}
public unsafe PosixResult TrySetSocketOption(int level, int optname, int value)
{
return SocketInterop.SetSockOpt(this, level, optname, (byte*)&value, 4);
}
// TODO: rename to GetSocketOptionInt
public int GetSocketOption(int level, int optname)
{
int value = 0;
var result = TryGetSocketOption(level, optname, ref value);
result.ThrowOnError();
return value;
}
public unsafe PosixResult TryGetSocketOption(int level, int optname, ref int value)
{
int v = 0;
socklen_t length = 4;
var rv = SocketInterop.GetSockOpt(this, level, optname, (byte*)&v, &length);
if (rv.IsSuccess)
{
value = v;
}
return rv;
}
public IPEndPointStruct GetLocalIPAddress(IPAddress reuseAddress = null)
{
IPEndPointStruct ep;
TryGetLocalIPAddress(out ep, reuseAddress)
.ThrowOnError();
return ep;
}
public unsafe PosixResult TryGetLocalIPAddress(out IPEndPointStruct ep, IPAddress reuseAddress = null)
=> SocketInterop.TryGetLocalIPAddress(this, out ep, reuseAddress);
public IPEndPointStruct GetPeerIPAddress()
{
IPEndPointStruct ep;
TryGetPeerIPAddress(out ep)
.ThrowOnError();
return ep;
}
public unsafe PosixResult TryGetPeerIPAddress(out IPEndPointStruct ep)
=> SocketInterop.TryGetPeerIPAddress(this, out ep);
private static void ValidateSegment(ArraySegment<byte> segment)
{
// ArraySegment<byte> is not nullable.
if (segment.Array == null)
{
throw new ArgumentNullException(nameof(segment));
}
// Length zero is explicitly allowed
if (segment.Offset < 0 || segment.Count < 0 || segment.Count > (segment.Array.Length - segment.Offset))
{
throw new ArgumentOutOfRangeException(nameof(segment));
}
}
public static SocketPair CreatePair(int domain, int type, int protocol, bool blocking)
{
int socket1;
int socket2;
var result = SocketInterop.SocketPair(domain, type, protocol, blocking, out socket1, out socket2);
result.ThrowOnError();
return new SocketPair { Socket1 = socket1, Socket2 = socket2 };
}
public unsafe PosixResult TryReceiveSocket(out Socket socket, bool blocking)
{
return SocketInterop.ReceiveSocket(this, out socket, blocking);
}
public unsafe PosixResult TryAcceptAndSendHandleTo(int toSocket)
{
return SocketInterop.AcceptAndSendHandleTo(this, toSocket);
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/EPoll.cs<|end_filename|>
using System;
using System.Runtime.InteropServices;
using System.Threading;
using static Tmds.Linux.LibC;
using Tmds.Linux;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
static class EPollInterop
{
public static PosixResult EPollCreate(out EPoll epoll)
{
epoll = new EPoll();
int rv = epoll_create1(EPOLL_CLOEXEC);
if (rv == -1)
{
epoll = null;
}
else
{
epoll.SetHandle(rv);
}
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult EPollWait(int epoll, epoll_event* events, int maxEvents, int timeout)
{
int rv;
do
{
rv = epoll_wait(epoll, events, maxEvents, timeout);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
public static unsafe PosixResult EPollWait(EPoll epoll, epoll_event* events, int maxEvents, int timeout)
=> EPollWait(epoll.DangerousGetHandle().ToInt32(), events, maxEvents, timeout);
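// Registers, updates, or removes interest for fd. The caller-supplied key is stored in
// epoll_event.data.fd and is what EPollWait later reports back for the event.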
public static unsafe PosixResult EPollControl(int epoll, int operation, int fd, int events, int data)
{
epoll_event ev = default(epoll_event);
ev.events = events;
ev.data.fd = data;
int rv = epoll_ctl(epoll, operation, fd, &ev);
return PosixResult.FromReturnValue(rv);
}
public static PosixResult EPollControl(EPoll epoll, int operation, SafeHandle fd, int events, int data)
=> EPollControl(epoll.DangerousGetHandle().ToInt32(), operation, fd.DangerousGetHandle().ToInt32(), events, data);
}
// Warning: Some operations use DangerousGetHandle for increased performance
class EPoll : CloseSafeHandle
{
public const int TimeoutInfinite = -1;
internal EPoll()
{}
public static EPoll Create()
{
EPoll epoll;
var result = EPollInterop.EPollCreate(out epoll);
result.ThrowOnError();
return epoll;
}
public void Control(int operation, SafeHandle fd, int events, int data)
{
TryControl(operation, fd, events, data)
.ThrowOnError();
}
public PosixResult TryControl(int operation, SafeHandle fd, int events, int data)
{
return EPollInterop.EPollControl(this, operation, fd, events, data);
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/SystemScheduler.cs<|end_filename|>
using System.Runtime.InteropServices;
using Tmds.Linux;
using static Tmds.Linux.LibC;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
class SchedulerInterop
{
public unsafe static PosixResult SetCurrentThreadAffinity(int cpuId)
{
cpu_set_t cpu_set;
CPU_ZERO(&cpu_set);
CPU_SET(cpuId, &cpu_set);
int rv = sched_setaffinity(0, SizeOf.cpu_set_t, &cpu_set);
return PosixResult.FromReturnValue(rv);
}
public unsafe static PosixResult ClearCurrentThreadAffinity()
{
cpu_set_t cpu_set;
CPU_ZERO(&cpu_set);
for (int cpuId = 0; cpuId < CPU_SETSIZE; cpuId++)
{
CPU_SET(cpuId, &cpu_set);
}
int rv = sched_setaffinity(0, SizeOf.cpu_set_t, &cpu_set);
return PosixResult.FromReturnValue(rv);
}
public unsafe static PosixResult GetAvailableCpusForProcess()
{
cpu_set_t set;
int rv = sched_getaffinity(getpid(), SizeOf.cpu_set_t, &set);
if (rv == 0)
{
rv = CPU_COUNT(&set);
}
return PosixResult.FromReturnValue(rv);
}
}
class SystemScheduler
{
public static PosixResult TrySetCurrentThreadAffinity(int cpuId)
{
return SchedulerInterop.SetCurrentThreadAffinity(cpuId);
}
public static void SetCurrentThreadAffinity(int cpuId)
{
TrySetCurrentThreadAffinity(cpuId)
.ThrowOnError();
}
public static PosixResult TryClearCurrentThreadAffinity()
{
return SchedulerInterop.ClearCurrentThreadAffinity();
}
public static void ClearCurrentThreadAffinity()
{
TryClearCurrentThreadAffinity()
.ThrowOnError();
}
public static int GetAvailableCpusForProcess()
{
var result = SchedulerInterop.GetAvailableCpusForProcess();
result.ThrowOnError();
return result.IntValue;
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/WebHostBuilderLinuxExtensions.cs<|end_filename|>
using System;
using System.Runtime.InteropServices;
using Microsoft.AspNetCore.Connections;
using Microsoft.Extensions.DependencyInjection;
using RedHat.AspNetCore.Server.Kestrel.Transport.Linux;
namespace Microsoft.AspNetCore.Hosting
{
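// Usage sketch (illustrative only; assumes the standard WebHost bootstrap):
//
//   WebHost.CreateDefaultBuilder(args)
//       .UseLinuxTransport(options => options.ThreadCount = 4)
//       .UseStartup<Startup>();
//
// On non-Linux platforms both overloads are no-ops that return the builder unchanged.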
public static class WebHostBuilderLinuxTransportExtensions
{
public static IWebHostBuilder UseLinuxTransport(this IWebHostBuilder hostBuilder)
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
{
return hostBuilder;
}
return hostBuilder.ConfigureServices(services =>
{
services.AddSingleton<IConnectionListenerFactory, LinuxTransportFactory>();
});
}
public static IWebHostBuilder UseLinuxTransport(this IWebHostBuilder hostBuilder, Action<LinuxTransportOptions> options)
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
{
return hostBuilder;
}
return hostBuilder.UseLinuxTransport().ConfigureServices(services =>
{
services.Configure(options);
});
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/AcceptThread.cs<|end_filename|>
using System;
using System.IO;
using System.Threading.Tasks;
using System.Threading;
using Microsoft.AspNetCore.Connections;
using static Tmds.Linux.LibC;
using Tmds.Linux;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
sealed class AcceptThread : ITransportActionHandler
{
private enum State
{
Initial,
Started,
Stopped
}
private Socket _socket;
private State _state;
private readonly object _gate = new object();
private TaskCompletionSource<object> _stoppedTcs;
private Thread _thread;
private PipeEndPair _pipeEnds;
private int[] _handlers;
public AcceptThread(Socket socket)
{
_socket = socket;
_state = State.Initial;
_handlers = Array.Empty<int>();
}
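// Creates a unix socket pair: one end is kept by the accept loop to hand off accepted
// descriptors, the other end is returned for a transport thread to receive them on.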
public int CreateReceiveSocket()
{
lock (_gate)
{
if (_state != State.Initial)
{
throw new InvalidOperationException($"Invalid operation: {_state}");
}
var pair = Socket.CreatePair(AF_UNIX, SOCK_STREAM, 0, blocking: false);
var updatedHandlers = new int[_handlers.Length + 1];
Array.Copy(_handlers, updatedHandlers, _handlers.Length);
updatedHandlers[updatedHandlers.Length - 1] = pair.Socket1;
_handlers = updatedHandlers;
return pair.Socket2;
}
}
public Task BindAsync()
{
lock (_gate)
{
if (_state != State.Initial)
{
throw new InvalidOperationException($"Invalid operation: {_state}");
}
_stoppedTcs = new TaskCompletionSource<object>();
try
{
_pipeEnds = PipeEnd.CreatePair(blocking: false);
_thread = new Thread(AcceptThreadStart);
_thread.Start();
_state = State.Started;
}
catch (System.Exception)
{
_state = State.Stopped;
_stoppedTcs = null;
_socket.Dispose();
Cleanup();
throw;
}
}
return Task.CompletedTask;
}
public Task UnbindAsync()
{
lock (_gate)
{
if (_state != State.Stopped)
{
_state = State.Stopped;
try
{
_pipeEnds.WriteEnd?.WriteByte(0);
}
catch (IOException ex) when (ex.HResult == EPIPE)
{}
catch (ObjectDisposedException)
{}
}
return (Task)_stoppedTcs?.Task ?? Task.CompletedTask;
}
}
public Task StopAsync()
=> UnbindAsync();
private void Cleanup()
{
_pipeEnds.Dispose();
foreach (var handler in _handlers)
{
IOInterop.Close(handler);
}
}
public ValueTask<ConnectionContext> AcceptAsync(CancellationToken cancellationToken = default)
{
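// The accept thread never surfaces connections itself; accepted sockets are handed
// off to the transport threads, so there is nothing to return here.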
return default;
}
private unsafe void AcceptThreadStart(object state)
{
try
{
var socket = _socket;
using (socket)
{
using (EPoll epoll = EPoll.Create())
{
int epollFd = epoll.DangerousGetHandle().ToInt32();
const int acceptKey = 0;
const int pipeKey = 1;
// accept socket
epoll.Control(EPOLL_CTL_ADD, _socket, EPOLLIN, acceptKey);
// add pipe
epoll.Control(EPOLL_CTL_ADD, _pipeEnds.ReadEnd, EPOLLIN, pipeKey);
epoll_event ev;
bool running = true;
int nextHandler = 0;
var handlers = _handlers;
do
{
int numEvents = EPollInterop.EPollWait(epollFd, &ev, 1, timeout: EPoll.TimeoutInfinite).IntValue;
if (numEvents == 1)
{
if (ev.data.fd == acceptKey)
{
var handler = handlers[nextHandler];
nextHandler = (nextHandler + 1) % handlers.Length;
socket.TryAcceptAndSendHandleTo(handler);
}
else
{
running = false;
}
}
} while (running);
}
}
_stoppedTcs.TrySetResult(null);
}
catch (Exception e)
{
_stoppedTcs.SetException(e);
}
finally
{
Cleanup();
}
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/CloseSafeHandle.cs<|end_filename|>
// Copyright 2017 <NAME> <<EMAIL>>
// This software is made available under the MIT License
// See COPYING for details
using System;
using System.Runtime.InteropServices;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
internal class CloseSafeHandle : SafeHandle
{
public CloseSafeHandle()
: base(new IntPtr(-1), true)
{}
protected CloseSafeHandle(int handle)
: base(new IntPtr(handle), true)
{}
internal void SetHandle(int descriptor)
{
base.SetHandle((IntPtr)descriptor);
}
public override bool IsInvalid
{
get { return handle == new IntPtr(-1); }
}
protected override bool ReleaseHandle()
{
var result = IOInterop.Close(handle.ToInt32());
return result.IsSuccess;
}
protected unsafe PosixResult TryWrite(byte* buffer, int length)
{
return IOInterop.Write(this, buffer, length);
}
protected unsafe PosixResult TryRead(byte* buffer, int length)
{
return IOInterop.Read(this, buffer, length);
}
protected unsafe PosixResult TryWrite(ArraySegment<byte> buffer)
{
// TODO: validate buffer
fixed (byte* buf = buffer.Array)
{
return IOInterop.Write(this, buf + buffer.Offset, buffer.Count);
}
}
protected unsafe PosixResult TryRead(ArraySegment<byte> buffer)
{
// TODO: validate buffer
fixed (byte* buf = buffer.Array)
{
return IOInterop.Read(this, buf + buffer.Offset, buffer.Count);
}
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/Aio.cs<|end_filename|>
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using Tmds.Linux;
using static Tmds.Linux.LibC;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
static class AioInterop
{
public unsafe static PosixResult IoSetup(int nr, aio_context_t* ctx)
{
int rv = io_setup((uint)nr, ctx);
return PosixResult.FromReturnValue(rv);
}
public unsafe static PosixResult IoDestroy(aio_context_t ctx)
{
int rv = io_destroy(ctx);
return PosixResult.FromReturnValue(rv);
}
public unsafe static PosixResult IoSubmit(aio_context_t ctx, int nr, iocb** iocbpp)
{
int rv = io_submit(ctx, nr, iocbpp);
return PosixResult.FromReturnValue(rv);
}
public unsafe static PosixResult IoGetEvents(aio_context_t ctx, int min_nr, int nr, io_event* events, int timeoutMs)
{
timespec timeout = default(timespec);
bool hasTimeout = timeoutMs >= 0;
if (hasTimeout)
{
timeout.tv_sec = timeoutMs / 1000;
timeout.tv_nsec = 1000000 * (timeoutMs % 1000); // milliseconds -> nanoseconds
}
int rv;
do
{
rv = io_getevents(ctx, min_nr, nr, events, hasTimeout ? &timeout : null);
} while (rv < 0 && errno == EINTR);
return PosixResult.FromReturnValue(rv);
}
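// Fast path: reap completed events directly from the user-space aio ring buffer,
// avoiding the io_getevents syscall when enough events are already available.
// Falls back to the (blocking, no timeout) syscall variant otherwise.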
public unsafe static PosixResult IoGetEvents(aio_context_t ctx, int nr, io_event* events)
{
aio_ring* pRing = ctx.ring;
if (nr <= 0)
{
return new PosixResult(PosixResult.EINVAL);
}
if (pRing->magic == 0xa10a10a1 && pRing->incompat_features == 0)
{
int head = (int)pRing->head;
int tail = (int)pRing->tail;
int available = tail - head;
if (available < 0)
{
available += (int)pRing->nr;
}
if (available >= nr)
{
io_event* ringEvents = (io_event*)((byte*)pRing + pRing->header_length);
io_event* start = ringEvents + head;
io_event* end = start + nr;
if (head + nr > pRing->nr)
{
end -= pRing->nr;
}
if (end > start)
{
Copy(start, end, events);
}
else
{
io_event* eventsEnd = Copy(start, ringEvents + pRing->nr, events);
Copy(ringEvents, end, eventsEnd);
}
head += nr;
if (head >= pRing->nr)
{
head -= (int)pRing->nr;
}
pRing->head = (uint)head;
return new PosixResult(nr);
}
}
return IoGetEvents(ctx, nr, nr, events, -1);
}
private static unsafe io_event* Copy(io_event* start, io_event* end, io_event* dst)
{
uint byteCount = (uint)((byte*)end - (byte*)start);
Unsafe.CopyBlock(dst, start, byteCount);
return (io_event*)((byte*)dst + byteCount);
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/CpuSetTypeConverter.cs<|end_filename|>
using System;
using System.ComponentModel;
using System.Globalization;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
internal class CpuSetTypeConverter : TypeConverter
{
public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
{
if (sourceType == typeof(string))
{
return true;
}
return base.CanConvertFrom(context, sourceType);
}
public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value)
{
if (value is string)
{
return CpuSet.Parse((string)value);
}
return base.ConvertFrom(context, culture, value);
}
public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
{
if (destinationType == typeof(string))
{
return value.ToString();
}
return base.ConvertTo(context, culture, value, destinationType);
}
}
}
<|start_filename|>src/RedHat.AspNetCore.Server.Kestrel.Transport.Linux/Transport.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.Extensions.Logging;
using static Tmds.Linux.LibC;
namespace RedHat.AspNetCore.Server.Kestrel.Transport.Linux
{
internal class Transport : IConnectionListener
{
private enum State
{
Created,
Binding,
Bound,
Unbinding,
Unbound,
Stopping,
Stopped
}
// Kestrel LibuvConstants.ListenBacklog
private const int ListenBacklog = 128;
private readonly LinuxTransportOptions _transportOptions;
private readonly ILoggerFactory _loggerFactory;
private readonly ILogger _logger;
private State _state;
private readonly object _gate = new object();
private ITransportActionHandler[] _threads;
private IAsyncEnumerator<ConnectionContext> _acceptEnumerator;
public EndPoint EndPoint { get; }
public Transport(EndPoint endPoint, LinuxTransportOptions transportOptions, ILoggerFactory loggerFactory)
{
if (transportOptions == null)
{
throw new ArgumentNullException(nameof(transportOptions));
}
if (loggerFactory == null)
{
throw new ArgumentNullException(nameof(loggerFactory));
}
if (endPoint == null)
{
throw new ArgumentNullException(nameof(endPoint));
}
EndPoint = endPoint;
_transportOptions = transportOptions;
_loggerFactory = loggerFactory;
_logger = loggerFactory.CreateLogger<Transport>();
_threads = Array.Empty<TransportThread>();
}
public async Task BindAsync()
{
AcceptThread acceptThread;
TransportThread[] transportThreads;
lock (_gate)
{
if (_state != State.Created)
{
ThrowInvalidOperation();
}
_state = State.Binding;
switch (EndPoint)
{
case IPEndPoint ipEndPoint:
acceptThread = null;
transportThreads = CreateTransportThreads(ipEndPoint, acceptThread: null);
break;
case UnixDomainSocketEndPoint unixDomainSocketEndPoint:
var socketPath = unixDomainSocketEndPoint.ToString();
var unixDomainSocket = Socket.Create(AF_UNIX, SOCK_STREAM, 0, blocking: false);
File.Delete(socketPath);
unixDomainSocket.Bind(socketPath);
unixDomainSocket.Listen(ListenBacklog);
acceptThread = new AcceptThread(unixDomainSocket);
transportThreads = CreateTransportThreads(ipEndPoint: null, acceptThread);
break;
case FileHandleEndPoint fileHandleEndPoint:
var fileHandleSocket = new Socket((int)fileHandleEndPoint.FileHandle);
acceptThread = new AcceptThread(fileHandleSocket);
transportThreads = CreateTransportThreads(ipEndPoint: null, acceptThread);
break;
default:
throw new NotSupportedException($"Unknown ListenType: {EndPoint.GetType()}.");
}
_threads = new ITransportActionHandler[transportThreads.Length + (acceptThread != null ? 1 : 0)];
_threads[0] = acceptThread;
for (int i = 0; i < transportThreads.Length; i++)
{
_threads[i + (acceptThread == null ? 0 : 1)] = transportThreads[i];
}
_logger.LogDebug($@"BindAsync {EndPoint}: TC:{_transportOptions.ThreadCount} TA:{_transportOptions.SetThreadAffinity} IC:{_transportOptions.ReceiveOnIncomingCpu} DA:{_transportOptions.DeferAccept}");
}
var tasks = new Task[transportThreads.Length];
for (int i = 0; i < transportThreads.Length; i++)
{
tasks[i] = transportThreads[i].BindAsync();
}
try
{
await Task.WhenAll(tasks);
if (acceptThread != null)
{
await acceptThread.BindAsync();
}
_acceptEnumerator = AcceptConnections();
lock (_gate)
{
if (_state == State.Binding)
{
_state = State.Bound;
}
else
{
ThrowInvalidOperation();
}
}
}
catch
{
await DisposeAsync();
throw;
}
}
public async ValueTask<ConnectionContext> AcceptAsync(CancellationToken cancellationToken = default)
{
if (cancellationToken.CanBeCanceled)
{
throw new NotImplementedException("AcceptAsync does not currently support cancellation via a token.");
}
lock (_gate)
{
if (_state >= State.Stopping)
{
throw new ObjectDisposedException(GetType().FullName);
}
}
if (await _acceptEnumerator.MoveNextAsync())
{
return _acceptEnumerator.Current;
}
// null means we're done...
return null;
}
private static int s_threadId = 0;
private TransportThread[] CreateTransportThreads(IPEndPoint ipEndPoint, AcceptThread acceptThread)
{
var threads = new TransportThread[_transportOptions.ThreadCount];
IList<int> preferredCpuIds = null;
if (_transportOptions.SetThreadAffinity)
{
preferredCpuIds = GetPreferredCpuIds();
}
int cpuIdx = 0;
for (int i = 0; i < _transportOptions.ThreadCount; i++)
{
int cpuId = preferredCpuIds == null ? -1 : preferredCpuIds[cpuIdx++ % preferredCpuIds.Count];
int threadId = Interlocked.Increment(ref s_threadId);
var thread = new TransportThread(ipEndPoint, _transportOptions, acceptThread, threadId, cpuId, _loggerFactory);
threads[i] = thread;
}
return threads;
}
private IList<int> GetPreferredCpuIds()
{
if (!_transportOptions.CpuSet.IsEmpty)
{
return _transportOptions.CpuSet.Cpus;
}
var ids = new List<int>();
bool found = true;
int level = 0;
do
{
found = false;
foreach (var socket in CpuInfo.GetSockets())
{
var cores = CpuInfo.GetCores(socket);
foreach (var core in cores)
{
var cpuIdIterator = CpuInfo.GetCpuIds(socket, core).GetEnumerator();
int d = 0;
while (cpuIdIterator.MoveNext())
{
if (d++ == level)
{
ids.Add(cpuIdIterator.Current);
found = true;
break;
}
}
}
}
level++;
} while (found && ids.Count < _transportOptions.ThreadCount);
return ids;
}
public async ValueTask UnbindAsync(CancellationToken cancellationToken = default)
{
lock (_gate)
{
if (_state <= State.Unbinding)
{
_state = State.Unbinding;
}
else
{
return;
}
}
var tasks = new Task[_threads.Length];
for (int i = 0; i < _threads.Length; i++)
{
tasks[i] = _threads[i].UnbindAsync();
}
await Task.WhenAll(tasks);
lock (_gate)
{
if (_state == State.Unbinding)
{
_state = State.Unbound;
}
else
{
ThrowInvalidOperation();
}
}
}
public async ValueTask DisposeAsync()
{
lock (_gate)
{
if (_state <= State.Stopping)
{
_state = State.Stopping;
}
else
{
return;
}
}
var tasks = new Task[_threads.Length];
for (int i = 0; i < _threads.Length; i++)
{
tasks[i] = _threads[i].StopAsync();
}
await Task.WhenAll(tasks);
lock (_gate)
{
if (_state == State.Stopping)
{
_state = State.Stopped;
}
else
{
ThrowInvalidOperation();
}
}
}
private async IAsyncEnumerator<ConnectionContext> AcceptConnections()
{
var slots = new Task<(ConnectionContext, int)>[_threads.Length];
// This is the task we'll put in the slot when each listener completes. It'll prevent
// us from having to shrink the array. We'll just loop while there are active slots.
var incompleteTask = new TaskCompletionSource<(ConnectionContext, int)>().Task;
var remainingSlots = slots.Length;
// Issue parallel accepts on all listeners
for (int i = 0; i < remainingSlots; i++)
{
slots[i] = AcceptAsync(_threads[i], i);
}
while (remainingSlots > 0)
{
// Calling GetAwaiter().GetResult() is safe because we know the task is completed
(var connection, var slot) = (await Task.WhenAny(slots)).GetAwaiter().GetResult();
// If the connection is null then the listener was closed
if (connection == null)
{
remainingSlots--;
slots[slot] = incompleteTask;
}
else
{
// Fill that slot with another accept and yield the connection
slots[slot] = AcceptAsync(_threads[slot], slot);
yield return connection;
}
}
static async Task<(ConnectionContext, int)> AcceptAsync(ITransportActionHandler transportThread, int slot)
{
return (await transportThread.AcceptAsync(), slot);
}
}
private void ThrowInvalidOperation()
{
throw new InvalidOperationException($"Invalid operation: {_state}");
}
}
} | Zorus/kestrel-linux-transport |
<|start_filename|>app/src/main/java/com/tinbytes/samples/showhidetoolbar/MainActivity2.java<|end_filename|>
/*
* Copyright 2015, <NAME> & Tinbytes, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.tinbytes.samples.showhidetoolbar;
import android.annotation.TargetApi;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.tinbytes.samples.showhidetoolbar.util.CityUtils;
import com.tinbytes.samples.showhidetoolbar.util.HelpUtils;
import com.tinbytes.samples.showhidetoolbar.util.RecyclerViewUtils;
public class MainActivity2 extends AppCompatActivity {
// We need a reference to save/restore its state
private RecyclerViewUtils.ShowHideToolbarOnScrollingListener showHideToolbarListener;
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main_activity);
// Assign Toolbar to the activity
Toolbar tToolbar = (Toolbar) findViewById(R.id.tToolbar);
setSupportActionBar(tToolbar);
getSupportActionBar().setTitle(R.string.app_name);
// RecyclerView with sample data
RecyclerView rvCities = (RecyclerView) findViewById(R.id.rvCities);
rvCities.setLayoutManager(new LinearLayoutManager(this));
rvCities.setAdapter(new CitiesAdapter(CityUtils.CITIES));
rvCities.addOnScrollListener(showHideToolbarListener = new RecyclerViewUtils.ShowHideToolbarOnScrollingListener(tToolbar));
if (savedInstanceState != null) {
showHideToolbarListener.onRestoreInstanceState((RecyclerViewUtils.ShowHideToolbarOnScrollingListener.State) savedInstanceState
.getParcelable(RecyclerViewUtils.ShowHideToolbarOnScrollingListener.SHOW_HIDE_TOOLBAR_LISTENER_STATE));
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
outState.putParcelable(RecyclerViewUtils.ShowHideToolbarOnScrollingListener.SHOW_HIDE_TOOLBAR_LISTENER_STATE,
showHideToolbarListener.onSaveInstanceState());
super.onSaveInstanceState(outState);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_about:
HelpUtils.showAbout(this);
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Cities adapter to hold sample data for our RecyclerView.
*/
static class CitiesAdapter extends RecyclerView.Adapter<CitiesAdapter.ViewHolder> {
private String[] data;
static class ViewHolder extends RecyclerView.ViewHolder {
TextView tvName;
ViewHolder(View v) {
super(v);
tvName = (TextView) v.findViewById(R.id.tvName);
}
}
CitiesAdapter(String... data) {
this.data = data;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
return new ViewHolder(LayoutInflater.from(parent.getContext()).inflate(R.layout.city_item, parent, false));
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
holder.tvName.setText(data[position]);
}
@Override
public int getItemCount() {
return data.length;
}
}
}
<|start_filename|>app/src/main/java/com/tinbytes/samples/showhidetoolbar/util/HelpUtils.java<|end_filename|>
/*
* Copyright 2015, <NAME> & Tinbytes, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.tinbytes.samples.showhidetoolbar.util;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.text.Html;
import android.text.SpannableStringBuilder;
import android.text.method.LinkMovementMethod;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
import com.tinbytes.samples.showhidetoolbar.R;
/**
* Helper class to show a minimalistic About dialog.
*/
public final class HelpUtils {
private static final String ABOUT_DIALOG_TAG = "about_dialog";
public static void showAbout(Activity activity) {
FragmentManager fm = activity.getFragmentManager();
FragmentTransaction ft = fm.beginTransaction();
Fragment prev = fm.findFragmentByTag(ABOUT_DIALOG_TAG);
if (prev != null) {
ft.remove(prev);
}
ft.addToBackStack(null);
new AboutDialog().show(ft, ABOUT_DIALOG_TAG);
}
public static class AboutDialog extends DialogFragment {
private static final String VERSION_UNAVAILABLE = "N/A";
public AboutDialog() {
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
// Get app version
PackageManager pm = getActivity().getPackageManager();
String packageName = getActivity().getPackageName();
String versionName;
try {
PackageInfo info = pm.getPackageInfo(packageName, 0);
versionName = info.versionName;
} catch (PackageManager.NameNotFoundException e) {
versionName = VERSION_UNAVAILABLE;
}
SpannableStringBuilder aboutBody = new SpannableStringBuilder();
aboutBody.append(Html.fromHtml(getString(R.string.about_body, versionName)));
LayoutInflater li = (LayoutInflater) getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View v = li.inflate(R.layout.about_dialog, null);
TextView tvAbout = (TextView) v.findViewById(R.id.tvAbout);
tvAbout.setText(aboutBody);
tvAbout.setMovementMethod(new LinkMovementMethod());
return new AlertDialog.Builder(getActivity())
.setTitle(R.string.about)
.setView(v)
.setPositiveButton(android.R.string.ok,
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
dialog.dismiss();
}
}
).create();
}
}
}
<|start_filename|>app/src/main/java/com/tinbytes/samples/showhidetoolbar/util/RecyclerViewUtils.java<|end_filename|>
/*
* Copyright 2015, <NAME> & Tinbytes, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.tinbytes.samples.showhidetoolbar.util;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.annotation.TargetApi;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.animation.LinearInterpolator;
/**
* Helper class for RecyclerView/Toolbar scroll listener.
*/
public final class RecyclerViewUtils {
/**
* This class simplifies the hide/show Toolbar animation depicted in MainActivity.java.
* Check MainActivity2.java to see how to use it.
*/
public static class ShowHideToolbarOnScrollingListener extends RecyclerView.OnScrollListener {
public static final String SHOW_HIDE_TOOLBAR_LISTENER_STATE = "show-hide-toolbar-listener-state";
// The elevation of the toolbar when content is scrolled behind
private static final float TOOLBAR_ELEVATION = 14f;
private Toolbar toolbar;
private State state;
public ShowHideToolbarOnScrollingListener(Toolbar toolbar) {
this.toolbar = toolbar;
this.state = new State();
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void toolbarSetElevation(float elevation) {
if (AndroidUtils.isLollipop()) {
toolbar.setElevation(elevation == 0 ? 0 : TOOLBAR_ELEVATION);
}
}
private void toolbarAnimateShow(final int verticalOffset) {
toolbar.animate()
.translationY(0)
.setInterpolator(new LinearInterpolator())
.setDuration(180)
.setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationStart(Animator animation) {
toolbarSetElevation(verticalOffset == 0 ? 0 : TOOLBAR_ELEVATION);
}
});
}
private void toolbarAnimateHide() {
toolbar.animate()
.translationY(-toolbar.getHeight())
.setInterpolator(new LinearInterpolator())
.setDuration(180)
.setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
toolbarSetElevation(0);
}
});
}
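// When scrolling settles, snap the toolbar fully shown or fully hidden depending on
// the last scroll direction and how far it has already been translated.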
@Override
public final void onScrollStateChanged(RecyclerView recyclerView, int newState) {
if (newState == RecyclerView.SCROLL_STATE_IDLE) {
if (state.scrollingOffset > 0) {
if (state.verticalOffset > toolbar.getHeight()) {
toolbarAnimateHide();
} else {
toolbarAnimateShow(state.verticalOffset);
}
} else if (state.scrollingOffset < 0) {
if (toolbar.getTranslationY() < toolbar.getHeight() * -0.6 && state.verticalOffset > toolbar.getHeight()) {
toolbarAnimateHide();
} else {
toolbarAnimateShow(state.verticalOffset);
}
}
}
}
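// Slide the toolbar in sync with scroll deltas: scrolling down pushes it off-screen
// by up to its height; scrolling up brings it back. Elevation is applied only while
// list content is scrolled behind the toolbar.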
@Override
public final void onScrolled(RecyclerView recyclerView, int dx, int dy) {
state.verticalOffset = recyclerView.computeVerticalScrollOffset();
state.scrollingOffset = dy;
int toolbarYOffset = (int) (dy - toolbar.getTranslationY());
toolbar.animate().cancel();
if (state.scrollingOffset > 0) {
if (toolbarYOffset < toolbar.getHeight()) {
if (state.verticalOffset > toolbar.getHeight()) {
toolbarSetElevation(TOOLBAR_ELEVATION);
}
toolbar.setTranslationY(state.translationY = -toolbarYOffset);
} else {
toolbarSetElevation(0);
toolbar.setTranslationY(state.translationY = -toolbar.getHeight());
}
} else if (state.scrollingOffset < 0) {
if (toolbarYOffset < 0) {
if (state.verticalOffset <= 0) {
toolbarSetElevation(0);
}
toolbar.setTranslationY(state.translationY = 0);
} else {
if (state.verticalOffset > toolbar.getHeight()) {
toolbarSetElevation(TOOLBAR_ELEVATION);
}
toolbar.setTranslationY(state.translationY = -toolbarYOffset);
}
}
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public void onRestoreInstanceState(State state) {
this.state.verticalOffset = state.verticalOffset;
this.state.scrollingOffset = state.scrollingOffset;
if (AndroidUtils.isLollipop()) {
toolbar.setElevation(state.elevation);
toolbar.setTranslationY(state.translationY);
}
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public State onSaveInstanceState() {
state.translationY = toolbar.getTranslationY();
if (AndroidUtils.isLollipop()) {
state.elevation = toolbar.getElevation();
}
return state;
}
/**
* Parcelable RecyclerView/Toolbar state for simpler saving/restoring its current state.
*/
public static final class State implements Parcelable {
public static Creator<State> CREATOR = new Creator<State>() {
public State createFromParcel(Parcel parcel) {
return new State(parcel);
}
public State[] newArray(int size) {
return new State[size];
}
};
// Keeps track of the overall vertical offset in the list
private int verticalOffset;
// Determines the scroll UP/DOWN offset
private int scrollingOffset;
// Toolbar values
private float translationY;
private float elevation;
State() {
}
State(Parcel parcel) {
this.verticalOffset = parcel.readInt();
this.scrollingOffset = parcel.readInt();
this.translationY = parcel.readFloat();
this.elevation = parcel.readFloat();
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel parcel, int flags) {
parcel.writeInt(verticalOffset);
parcel.writeInt(scrollingOffset);
parcel.writeFloat(translationY);
parcel.writeFloat(elevation);
}
}
}
private RecyclerViewUtils() {
}
}
<|start_filename|>app/src/main/java/com/tinbytes/samples/showhidetoolbar/MainActivity.java<|end_filename|>
/*
* Copyright 2015, <NAME> & Tinbytes, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.tinbytes.samples.showhidetoolbar;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.annotation.TargetApi;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.LinearInterpolator;
import android.widget.TextView;
import com.tinbytes.samples.showhidetoolbar.util.AndroidUtils;
import com.tinbytes.samples.showhidetoolbar.util.CityUtils;
import com.tinbytes.samples.showhidetoolbar.util.HelpUtils;
public class MainActivity extends AppCompatActivity {
// The elevation of the toolbar when content is scrolled behind
private static final float TOOLBAR_ELEVATION = 14f;
// To save/restore recyclerview state on configuration changes
private static final String STATE_RECYCLER_VIEW = "state-recycler-view";
private static final String STATE_VERTICAL_OFFSET = "state-vertical-offset";
private static final String STATE_SCROLLING_OFFSET = "state-scrolling-direction";
private static final String STATE_TOOLBAR_ELEVATION = "state-toolbar-elevation";
private static final String STATE_TOOLBAR_TRANSLATION_Y = "state-toolbar-translation-y";
// We need a reference to the toolbar for hide/show animation
private Toolbar tToolbar;
// We need a reference to the recyclerview to save/restore its state
private RecyclerView rvCities;
// Keeps track of the overall vertical offset in the list
private int verticalOffset;
// Determines the scroll UP/DOWN offset
private int scrollingOffset;
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main_activity);
// Assign Toolbar to the activity
tToolbar = (Toolbar) findViewById(R.id.tToolbar);
setSupportActionBar(tToolbar);
getSupportActionBar().setTitle(R.string.app_name);
// RecyclerView with sample data
rvCities = (RecyclerView) findViewById(R.id.rvCities);
rvCities.setLayoutManager(new LinearLayoutManager(this));
rvCities.setAdapter(new CitiesAdapter(CityUtils.CITIES));
if (savedInstanceState != null) {
if (AndroidUtils.isLollipop()) {
tToolbar.setElevation(savedInstanceState.getFloat(STATE_TOOLBAR_ELEVATION));
}
tToolbar.setTranslationY(savedInstanceState.getFloat(STATE_TOOLBAR_TRANSLATION_Y));
verticalOffset = savedInstanceState.getInt(STATE_VERTICAL_OFFSET);
scrollingOffset = savedInstanceState.getInt(STATE_SCROLLING_OFFSET);
rvCities.getLayoutManager().onRestoreInstanceState(savedInstanceState.getParcelable(STATE_RECYCLER_VIEW));
}
// We need to detect scrolling changes in the RecyclerView
rvCities.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
if (newState == RecyclerView.SCROLL_STATE_IDLE) {
if (scrollingOffset > 0) {
if (verticalOffset > tToolbar.getHeight()) {
toolbarAnimateHide();
} else {
toolbarAnimateShow(verticalOffset);
}
} else if (scrollingOffset < 0) {
if (tToolbar.getTranslationY() < tToolbar.getHeight() * -0.6 && verticalOffset > tToolbar.getHeight()) {
toolbarAnimateHide();
} else {
toolbarAnimateShow(verticalOffset);
}
}
}
}
@Override
public final void onScrolled(RecyclerView recyclerView, int dx, int dy) {
verticalOffset = rvCities.computeVerticalScrollOffset();
scrollingOffset = dy;
int toolbarYOffset = (int) (dy - tToolbar.getTranslationY());
tToolbar.animate().cancel();
if (scrollingOffset > 0) {
if (toolbarYOffset < tToolbar.getHeight()) {
if (verticalOffset > tToolbar.getHeight()) {
toolbarSetElevation(TOOLBAR_ELEVATION);
}
tToolbar.setTranslationY(-toolbarYOffset);
} else {
toolbarSetElevation(0);
tToolbar.setTranslationY(-tToolbar.getHeight());
}
} else if (scrollingOffset < 0) {
if (toolbarYOffset < 0) {
if (verticalOffset <= 0) {
toolbarSetElevation(0);
}
tToolbar.setTranslationY(0);
} else {
if (verticalOffset > tToolbar.getHeight()) {
toolbarSetElevation(TOOLBAR_ELEVATION);
}
tToolbar.setTranslationY(-toolbarYOffset);
}
}
}
});
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
protected void onSaveInstanceState(Bundle outState) {
if (AndroidUtils.isLollipop()) {
outState.putFloat(STATE_TOOLBAR_ELEVATION, tToolbar.getElevation());
}
outState.putFloat(STATE_TOOLBAR_TRANSLATION_Y, tToolbar.getTranslationY());
outState.putInt(STATE_VERTICAL_OFFSET, verticalOffset);
outState.putInt(STATE_SCROLLING_OFFSET, scrollingOffset);
outState.putParcelable(STATE_RECYCLER_VIEW, rvCities.getLayoutManager().onSaveInstanceState());
super.onSaveInstanceState(outState);
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void toolbarSetElevation(float elevation) {
// setElevation() only works on Lollipop
if (AndroidUtils.isLollipop()) {
tToolbar.setElevation(elevation);
}
}
private void toolbarAnimateShow(final int verticalOffset) {
tToolbar.animate()
.translationY(0)
.setInterpolator(new LinearInterpolator())
.setDuration(180)
.setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationStart(Animator animation) {
toolbarSetElevation(verticalOffset == 0 ? 0 : TOOLBAR_ELEVATION);
}
});
}
private void toolbarAnimateHide() {
tToolbar.animate()
.translationY(-tToolbar.getHeight())
.setInterpolator(new LinearInterpolator())
.setDuration(180)
.setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
toolbarSetElevation(0);
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_about:
HelpUtils.showAbout(this);
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Cities adapter to hold sample data for our RecyclerView.
*/
static class CitiesAdapter extends RecyclerView.Adapter<CitiesAdapter.ViewHolder> {
private String[] data;
static class ViewHolder extends RecyclerView.ViewHolder {
TextView tvName;
ViewHolder(View v) {
super(v);
tvName = (TextView) v.findViewById(R.id.tvName);
}
}
CitiesAdapter(String... data) {
this.data = data;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
return new ViewHolder(LayoutInflater.from(parent.getContext()).inflate(R.layout.city_item, parent, false));
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
holder.tvName.setText(data[position]);
}
@Override
public int getItemCount() {
return data.length;
}
}
}
| rylexr/android-show-hide-toolbar |
<|start_filename|>delete.go<|end_filename|>
// Copyright 2016 <NAME>. All rights reserved.
// Use of this source code is governed by the MIT license
// that can be found in the LICENSE file.
package bolthold
import (
"reflect"
"github.com/boltdb/bolt"
)
// Delete deletes a record from the bolthold. dataType just needs to be an example of the
// type stored so that the proper bucket and indexes are updated.
func (s *Store) Delete(key, dataType interface{}) error {
return s.Bolt().Update(func(tx *bolt.Tx) error {
return s.TxDelete(tx, key, dataType)
})
}
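// Usage sketch (illustrative; Item is a hypothetical stored type):
//
//	err := store.Delete("some key", Item{})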
// TxDelete is the same as Delete except it allows you to specify your own transaction
func (s *Store) TxDelete(tx *bolt.Tx, key, dataType interface{}) error {
if !tx.Writable() {
return bolt.ErrTxNotWritable
}
storer := newStorer(dataType)
gk, err := encode(key)
if err != nil {
return err
}
b := tx.Bucket([]byte(storer.Type()))
if b == nil {
return ErrNotFound
}
value := reflect.New(reflect.TypeOf(dataType)).Interface()
bVal := b.Get(gk)
err = decode(bVal, value)
if err != nil {
return err
}
// delete data
err = b.Delete(gk)
if err != nil {
return err
}
// remove any indexes
err = indexDelete(storer, tx, gk, value)
if err != nil {
return err
}
return nil
}
// DeleteMatching deletes all of the records that match the passed in query
func (s *Store) DeleteMatching(dataType interface{}, query *Query) error {
return s.Bolt().Update(func(tx *bolt.Tx) error {
return s.TxDeleteMatching(tx, dataType, query)
})
}
// TxDeleteMatching does the same as DeleteMatching, but allows you to specify your own transaction
func (s *Store) TxDeleteMatching(tx *bolt.Tx, dataType interface{}, query *Query) error {
return deleteQuery(tx, dataType, query)
}
| rmg/bolthold |
<|start_filename|>Core/Field/Menu/FieldDebugControls.cs<|end_filename|>
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Input;
using System.Diagnostics;
using System.Linq;
namespace OpenVIII.Fields.IGMData
{
public class FieldDebugControls : OpenVIII.IGMData.Base
{
#region Fields
private const int TotalRows = 8;
private const int TotalBelow = 2;
private bool _skipRefresh;
#endregion Fields
#region Properties
public IGMDataItem.Text ClassicSpriteBatchMode { get => (IGMDataItem.Text)ITEM[4, 0]; protected set => ITEM[4, 0] = value; }
public IGMDataItem.Text Deswizzle { get => (IGMDataItem.Text)ITEM[6, 0]; protected set => ITEM[6, 0] = value; }
public IGMDataItem.Text FieldName { get => (IGMDataItem.Text)ITEM[0, 0]; protected set => ITEM[0, 0] = value; }
public IGMDataItem.Text ForceDump { get => (IGMDataItem.Text)ITEM[5, 0]; protected set => ITEM[5, 0] = value; }
public IGMDataItem.Text MouseLocationIn3D { get => (IGMDataItem.Text)ITEM[Count - 2, 0]; protected set => ITEM[Count - 2, 0] = value; }
public IGMDataItem.Text AreaName { get => (IGMDataItem.Text)ITEM[Count - 1, 0]; protected set => ITEM[Count - 1, 0] = value; }
public IGMDataItem.Text PerspectiveQuadMode { get => (IGMDataItem.Text)ITEM[3, 0]; protected set => ITEM[3, 0] = value; }
public IGMDataItem.Text QuadBG { get => (IGMDataItem.Text)ITEM[2, 0]; protected set => ITEM[2, 0] = value; }
public IGMDataItem.Text Reswizzle { get => (IGMDataItem.Text)ITEM[7, 0]; protected set => ITEM[7, 0] = value; }
public IGMDataItem.Text WalkMesh { get => (IGMDataItem.Text)ITEM[1, 0]; protected set => ITEM[1, 0] = value; }
#endregion Properties
#region Methods
public static FieldDebugControls Create(Rectangle pos) => Create<FieldDebugControls>(TotalRows + TotalBelow, 1, new IGMDataItem.Box { Pos = pos }, 1, TotalRows);
public override bool Inputs()
{
Memory.IsMouseVisible = true;
if (Input2.DelayedButton(MouseButtons.MiddleButton))
{
Debug.WriteLine($"=== Tiles Under MouseLocation: {Module.Background.MouseLocation} ===");
foreach (var tile in Module.Background.TilesUnderMouse())
{
Debug.WriteLine(tile);
}
return true;
}
if (Input2.DelayedButton(Keys.F5))
{
return ReloadTexture();
}
return base.Inputs();
}
private bool ReloadTexture()
{
SetCursor_select(0);
Inputs_OKAY();
return true;
}
public override bool Inputs_CANCEL()
{
Memory.Module = OpenVIII.Module.MainMenuDebug;
//System.Console.WriteLine("pressed cancel: " + (base.Inputs_CANCEL()?"true":"false"));
return base.Inputs_CANCEL() || true;
}
public override void Inputs_Left()
{
if (CURSOR_SELECT == 0)
{
if (Memory.FieldHolder.FieldID > 0)
Memory.FieldHolder.FieldID--;
else
Memory.FieldHolder.FieldID = checked((ushort)(Memory.FieldHolder.Fields.Length - 1));
Module.ResetField();
}
else skipsnd = true;
base.Inputs_Left();
}
public override bool Inputs_OKAY()
{
var i = 0;
if (CURSOR_SELECT == i++)
{
Module.Background?.Dispose();//force all textures to reload.
Module.ResetField();
}
else if (CURSOR_SELECT == i++)
{
Module.Toggles = Module.Toggles.Flip(Toggles.WalkMesh);
Refresh();
}
else if (CURSOR_SELECT == i++)
{
Module.Toggles = Module.Toggles.Flip(Toggles.Quad);
if (Module.Toggles.HasFlag(Toggles.ClassicSpriteBatch))
Module.Toggles = Module.Toggles.Flip(Toggles.ClassicSpriteBatch);
Refresh();
}
else if (CURSOR_SELECT == i++)
{
if (Module.Toggles.HasFlag(Toggles.Quad))
{
Module.Toggles = Module.Toggles.Flip(Toggles.Perspective);
Refresh();
}
else skipsnd = true;
}
else if (CURSOR_SELECT == i++)
{
Module.Toggles = Module.Toggles.Flip(Toggles.ClassicSpriteBatch);
if (Module.Toggles.HasFlag(Toggles.Quad))
Module.Toggles = Module.Toggles.Flip(Toggles.Quad);
if (Module.Background.HasSpriteBatchTexturesLoaded)
Refresh();
else
Module.ResetField();
}
else if (CURSOR_SELECT == i++)
{
Module.Toggles = Module.Toggles.Flip(Toggles.DumpingData);
Refresh();
}
else if (CURSOR_SELECT == i++)
{
Module.Background.Deswizzle();
Refresh();
}
else if (CURSOR_SELECT == i)
{
Module.Background.Reswizzle();
Refresh();
}
else skipsnd = true;
base.Inputs_OKAY();
return true;
}
public override void Inputs_Right()
{
if (CURSOR_SELECT == 0)
{
if (Memory.FieldHolder.FieldID < checked((ushort)(Memory.FieldHolder.Fields.Length - 1)))
Memory.FieldHolder.FieldID++;
else
Memory.FieldHolder.FieldID = 0;
Module.ResetField();
}
else skipsnd = true;
base.Inputs_Right();
}
public override void Refresh()
{
if (_skipRefresh)
{
_skipRefresh = false;
return;
}
FieldName.Data = $"Field: { Memory.FieldHolder.FieldID} - { Memory.FieldHolder.GetString()?.ToUpper()}";
BLANKS[0] = false;
if (Module.Mod != FieldModes.Disabled)
{
WalkMesh.Data = $"Draw WalkMesh: {Module.Toggles.HasFlag(Toggles.WalkMesh)}";
QuadBG.Data = $"Draw Quad BG: {Module.Toggles.HasFlag(Toggles.Quad)}";
PerspectiveQuadMode.Data = $"Perspective for Quads: {Module.Toggles.HasFlag(Toggles.Quad) && Module.Toggles.HasFlag(Toggles.Perspective)}";
if (Module.Toggles.HasFlag(Toggles.Quad))
{
BLANKS[3] = false;
PerspectiveQuadMode.FontColor = Font.ColorID.White;
}
else
{
BLANKS[3] = true;
PerspectiveQuadMode.FontColor = Font.ColorID.Grey;
}
ClassicSpriteBatchMode.Data = $"Classic SpriteBatch: {Module.Toggles.HasFlag(Toggles.ClassicSpriteBatch)}";
ForceDump.Data = $"Onload Dump Textures: {Module.Toggles.HasFlag(Toggles.DumpingData)}";
Deswizzle.Data = "Deswizzle Tiles";
Reswizzle.Data = "Reswizzle Tiles";
foreach (var i in Enumerable.Range(1, Rows))
{
ITEM[i, 0].Show();
if (i != 3)
BLANKS[i] = false;
}
foreach (var i in Enumerable.Range(8, Rows - 8))
{
ITEM[i, 0].Hide();
BLANKS[i] = true;
}
}
else
{
foreach (var i in Enumerable.Range(1, Rows))
{
ITEM[i, 0].Hide();
BLANKS[i] = true;
}
}
BLANKS[Count - 1] = true;
BLANKS[Count - 2] = true;
AreaName.Data = Module.AreaName;
base.Refresh();
}
public override bool Update()
{
MouseLocationIn3D.Data = (Module.Background?.MouseLocation ?? Vector3.Zero) != Vector3.Zero
? $"Mouse Cords: {Module.Background?.MouseLocation}"
: null;
return base.Update();
}
protected override void Init()
{
base.Init();
foreach (var i in Enumerable.Range(0, Count))
{
ITEM[i, 0] = new IGMDataItem.Text { Pos = SIZE[i] };
}
Cursor_Status = Cursor_Status.Enabled;
AreaName.Pos =
MouseLocationIn3D.Pos = SIZE[Rows - 1];
AreaName.Scale =
MouseLocationIn3D.Scale = new Vector2(1.5f);
AreaName.Y =
MouseLocationIn3D.Y = Y + Height + 10;
AreaName.Y += 16;
_skipRefresh = true;
}
protected override void InitShift(int i, int col, int row)
{
base.InitShift(i, col, row);
SIZE[i].Inflate(-22, -8);
//SIZE[i].Offset(0, 12 + (-8 * row));
}
#endregion Methods
}
}
<|start_filename|>Core/AV/Music/Midi/MidiFile.cs<|end_filename|>
using System;
using System.IO;
using System.Text;
using System.Collections.Generic;
using NAudio.Midi;
using NAudio.Utils;
namespace OpenVIII.AV.Midi
{
public class MidiFile : NAudio.Midi.MidiFile
{
public class MergeSort
{
/// <summary>
/// In-place and stable implementation of MergeSort
/// </summary>
static void Sort<T>(IList<T> list, int lowIndex, int highIndex, IComparer<T> comparer)
{
if (lowIndex >= highIndex)
{
return;
}
int midIndex = (lowIndex + highIndex) / 2;
// Partition the list into two lists and Sort them recursively
Sort(list, lowIndex, midIndex, comparer);
Sort(list, midIndex + 1, highIndex, comparer);
// Merge the two sorted lists
int endLow = midIndex;
int startHigh = midIndex + 1;
while ((lowIndex <= endLow) && (startHigh <= highIndex))
{
// MRH: comparing with < 0 instead of <= 0 here would make the sort unstable
if (comparer.Compare(list[lowIndex], list[startHigh]) <= 0)
{
lowIndex++;
}
else
{
// list[lowIndex] > list[startHigh]
// The next element comes from the second list,
// move the list[start_hi] element into the next
// position and shuffle all the other elements up.
T t = list[startHigh];
for (int k = startHigh - 1; k >= lowIndex; k--)
{
list[k + 1] = list[k];
}
list[lowIndex] = t;
lowIndex++;
endLow++;
startHigh++;
}
}
}
/// <summary>
/// MergeSort a list of comparable items
/// </summary>
public static void Sort<T>(IList<T> list) where T : IComparable<T>
{
Sort(list, 0, list.Count - 1, Comparer<T>.Default);
}
/// <summary>
/// MergeSort a list
/// </summary>
public static void Sort<T>(IList<T> list, IComparer<T> comparer)
{
Sort(list, 0, list.Count - 1, comparer);
}
}
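// Illustrative usage sketch (assumption, not from the original file): sorting
// MIDI events stably by absolute time, as ExportBinary does below; equal
// elements keep their original order, which an unstable sort cannot guarantee.
//
// IList<MidiEvent> track = events[0];
// MergeSort.Sort(track, new MidiEventComparer());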
public MidiFile(string filename) : base(filename, true)
{
}
private static void ExportBinary(BinaryWriter writer, MidiEventCollection events)
{
writer.Write(System.Text.Encoding.UTF8.GetBytes("MThd"));
writer.Write(SwapUInt32(6)); // chunk size
writer.Write(SwapUInt16((ushort)events.MidiFileType));
writer.Write(SwapUInt16((ushort)events.Tracks));
writer.Write(SwapUInt16((ushort)events.DeltaTicksPerQuarterNote));
for (int track = 0; track < events.Tracks; track++)
{
IList<MidiEvent> eventList = events[track];
writer.Write(System.Text.Encoding.UTF8.GetBytes("MTrk"));
long trackSizePosition = writer.BaseStream.Position;
writer.Write(SwapUInt32(0));
long absoluteTime = events.StartAbsoluteTime;
// use a stable sort to preserve ordering of MIDI events whose
// absolute times are the same
MergeSort.Sort(eventList, new MidiEventComparer());
if (eventList.Count > 0 && !MidiEvent.IsEndTrack(eventList[eventList.Count - 1])) {
Memory.Log.WriteLine("Exporting a track with a missing end track");
}
foreach (var midiEvent in eventList)
{
midiEvent.Export(ref absoluteTime, writer);
}
uint trackChunkLength = (uint)(writer.BaseStream.Position - trackSizePosition) - 4;
writer.BaseStream.Position = trackSizePosition;
writer.Write(SwapUInt32(trackChunkLength));
writer.BaseStream.Position += trackChunkLength;
}
}
private static uint SwapUInt32(uint i)
{
return ((i & 0xFF000000) >> 24) | ((i & 0x00FF0000) >> 8) | ((i & 0x0000FF00) << 8) | ((i & 0x000000FF) << 24);
}
private static ushort SwapUInt16(ushort i)
{
return (ushort)(((i & 0xFF00) >> 8) | ((i & 0x00FF) << 8));
}
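// Worked example (for clarity): MIDI header fields are big-endian while
// BinaryWriter writes little-endian, so the byte order must be swapped first.
// SwapUInt32(6) -> 0x06000000, written to disk as 00 00 00 06.
// SwapUInt16(1) -> 0x0100, written to disk as 00 01.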
private static void ExportCheckTracks(MidiEventCollection events)
{
if (events.MidiFileType == 0 && events.Tracks > 1)
{
throw new ArgumentException("Can't export more than one track to a type 0 file");
}
}
/// <summary>
/// Exports a MIDI file
/// </summary>
/// <param name="filename">Filename to export to</param>
/// <param name="events">Events to export</param>
public static void Export(string filename, MidiEventCollection events)
{
ExportCheckTracks(events);
using (var writer = new BinaryWriter(File.Create(filename)))
ExportBinary(writer, events);
}
/// <summary>
/// Exports a MIDI file
/// </summary>
/// <param name="stream">Stream to work with</param>
/// <param name="events">Events to export</param>
public static void Export(Stream stream, MidiEventCollection events)
{
ExportCheckTracks(events);
using (var writer = new BinaryWriter(stream))
ExportBinary(writer, events);
}
}
}
<|start_filename|>Core/ModuleHandler.cs<|end_filename|>
using Microsoft.Xna.Framework;
using OpenVIII.Encoding.Tags;
using System;
using System.Threading.Tasks;
namespace OpenVIII
{
public static class ModuleHandler
{
private static Module module = Memory.Module;
private static Module lastModule = Memory.Module;
public static async void Update(GameTime gameTime)
{
if (lastModule != module)
{
//got stuck on this once had to force close.
//GC.Collect();
//GC.WaitForPendingFinalizers();
lastModule = module;
}
module = Memory.Module;
//#if DEBUG
if (Input2.DelayedButton(FF8TextTagKey.Reset))// || Input2.DelayedButton(FF8TextTagKey.Cancel))
{
if (Memory.Module != Module.MainMenuDebug)// && Memory.Module != Module.BattleDebug)
{
Memory.Module = Module.MainMenuDebug;
InputMouse.Mode = MouseLockMode.Screen;
}
}
//#endif
switch (module)
{
//doesn't need memory
case Module.OvertureDebug:
case Module.MovieTest:
break;
default:
//requires memory to be loaded.
if ((Memory.InitTask != null) && (Memory.InitTask.IsCompleted == false ||
Memory.InitTask.Status == TaskStatus.Running ||
Memory.InitTask.Status == TaskStatus.WaitingToRun ||
Memory.InitTask.Status == TaskStatus.WaitingForActivation))
{
//task is still running loading assets blank screen and wait.
Memory.SuppressDraw = true;
await Memory.InitTask;
//fade in doesn't happen because time was set before the await.
//ending here causes update to be run again with new time
return;
}
break;
}
switch (module)
{
case Module.Battle:
ModuleBattle.Update();
break;
case Module.BattleDebug:
Menu.UpdateOnce();
ModuleBattleDebug.Update();
break;
case Module.MovieTest:
ModuleMovieTest.Update();
break;
case Module.FieldDebug:
Fields.Module.Update();
break;
case Module.OvertureDebug:
Module_overture_debug.Update();
break;
case Module.MainMenuDebug:
Menu.UpdateOnce();
Menu.Module.Update();
break;
case Module.WorldDebug:
Module_world_debug.Update(gameTime);
break;
case Module.FaceTest:
Module_face_test.Update();
break;
case Module.IconTest:
Module_icon_test.Update();
break;
case Module.CardTest:
Module_card_test.Update();
break;
case Module.FieldModelTest:
Fields.ModuleFieldObjectTest.Update();
break;
}
}
public static void Draw(GameTime gameTime)
{
switch (module)
{
//doesn't need memory
case Module.OvertureDebug:
case Module.MovieTest:
break;
default:
//requires memory to be loaded.
if ((Memory.InitTask != null) && (Memory.InitTask.IsCompleted == false ||
Memory.InitTask.Status == TaskStatus.Running ||
Memory.InitTask.Status == TaskStatus.WaitingToRun ||
Memory.InitTask.Status == TaskStatus.WaitingForActivation))
{
//suppress draw in update but if draw happens before update, blank screen, and end here
Memory.Graphics.GraphicsDevice.Clear(Color.Black);
return;
}
break;
}
switch (module)
{
case Module.Battle:
ModuleBattle.Draw();
break;
case Module.BattleDebug:
ModuleBattleDebug.Draw();
break;
case Module.MovieTest:
ModuleMovieTest.Draw();
break;
case Module.FieldDebug:
Fields.Module.Draw();
break;
case Module.OvertureDebug:
Module_overture_debug.Draw();
break;
case Module.MainMenuDebug:
Menu.Module.Draw();
break;
case Module.WorldDebug:
Module_world_debug.Draw();
break;
case Module.FaceTest:
Module_face_test.Draw();
break;
case Module.IconTest:
Module_icon_test.Draw();
break;
case Module.CardTest:
Module_card_test.Draw();
break;
case Module.BattleSwirl:
BattleSwirl.Draw();
break;
case Module.FieldModelTest:
Fields.ModuleFieldObjectTest.Draw();
break;
}
}
public static void ResetBS()
=> ModuleBattleDebug.ResetState();
}
}
<|start_filename|>Core/Menu/Images/Cards.cs<|end_filename|>
using Microsoft.Xna.Framework;
using System;
using System.Collections.Generic;
using System.Linq;
namespace OpenVIII
{
public sealed partial class Cards : SP2
{
#region Constructors
/// <summary>
/// Card images used in menus. The images used in the Triple Triad game are
/// stored inside ff8.exe as TIM files.
/// </summary>
/// <seealso cref="http://forums.qhimm.com/index.php?topic=11084.0"/>
public Cards()
{
}
#endregion Constructors
#region Methods
public static Cards Load() => Load<Cards>();
protected override void DefaultValues()
{
base.DefaultValues();
Props = new List<TexProps>()
{
new TexProps{Filename="mc00.tex", Count = 1 },
new TexProps{Filename="mc01.tex", Count = 1 },
new TexProps{Filename="mc02.tex", Count = 1 },
new TexProps{Filename="mc03.tex", Count = 1 },
new TexProps{Filename="mc04.tex", Count = 1 },
new TexProps{Filename="mc05.tex", Count = 1 },
new TexProps{Filename="mc06.tex", Count = 1 },
new TexProps{Filename="mc07.tex", Count = 1 },
new TexProps{Filename="mc08.tex", Count = 1 },
new TexProps{Filename="mc09.tex", Count = 1 }
};
TextureStartOffset = 0;
EntriesPerTexture = 11;
IndexFilename = "cardanm.sp2";
}
public const float AspectRatio = 62f / 88f; //B8 paper (62 x 88 mm)
protected override void Init() => base.Init();
public override TextureHandler GetTexture(Enum id, int file = -1)
{
var pos = Convert.ToInt32(id);
var pageFile = pos / EntriesPerTexture;
return pos >= (int)(Cards.ID.Card_Back) ? Textures[0] : Textures[pageFile];
}
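// Worked example (for clarity): with EntriesPerTexture = 11, card id 23 maps to
// texture file 23 / 11 = 2 and entry 23 % 11 = 1 inside it; any id at or beyond
// ID.Card_Back falls back to the shared back-card texture in Textures[0].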
public override void Draw(Enum id, Rectangle dst, float fade = 1)
{
var v = Convert.ToUInt32(id);
uint pos;
if (v >= Convert.ToUInt32(Cards.ID.Card_Back))
{
//assuming to use back card for Card_Back, Immune and Fail
pos = Memory.Cards.Count - 1;
}
else
{
pos = (uint)(v % EntriesPerTexture);
}
var src = GetEntry(pos).GetRectangle;
var tex = GetTexture(id);
tex.Draw(dst, src, Color.White * fade);
}
public override Entry GetEntry(uint id)
{
if (Entries.ContainsKey(id))
{
return Entries[id];
}
return null;
}
#endregion Methods
}
}
<|start_filename|>Core/World/MiniMaps.cs<|end_filename|>
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using OpenVIII.Encoding.Tags;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace OpenVIII.World
{
class MiniMaps
{
//exe hardcode at FF82000:00C76BE8
private static byte[] wm_planetMinimap_indicesA_quad = new byte[]
{
0x0f,0x10,0x17,0x18,0x07,0x08,0x0e,0x0f,0x08,0x09,0x0f,0x10,0x09,0x0a,0x10,0x11,0x0e,0x0f,0x16,0x17,0x10,0x11,0x18,0x19,0x16,0x17,0x1d,0x1e,0x17,0x18,0x1e,0x1f,0x18,0x19,0x1f,0x20,0x02,0x03,0x07,0x08,0x03,0x04,0x08,0x09,0x04,0x05,0x09,0x0a,0x06,0x07,0x0d,0x0e,0x0d,0x0e,0x15,0x16,0x15,0x16,0x1c,0x1d,0x0a,0x0b,0x11,0x12,0x11,0x12,0x19,0x1a,0x19,0x1a,0x20,0x21,0x1d,0x1e,0x22,0x23,0x1e,0x1f,0x23,0x24,0x1f,0x20,0x24,0x25,0x00,0x01,0x03,0x04,0x0c,0x0d,0x14,0x15,0x12,0x13,0x1a,0x1b,0x23,0x24,0x26,0x27
};
//exe hardcode at FF82000:00C76C4C
private static byte[] wm_planetMinimap_indicesB_tris = new byte[]
{
0x00,0x02,0x03,0xff,0x01,0x04,0x05,0xff,0x06,0x0c,0x0d,0xff,0x0b,0x12,0x13,0xff,0x14,0x15,0x1c,0xff,0x1a,0x1b,0x21,0xff,0x22,0x23,0x26,0xff,0x24,0x25,0x27,0xff,0x02,0x06,0x07,0xff,0x05,0x0a,0x0b,0xff,0x1c,0x1d,0x22,0xff,0x20,0x21,0x25,0xff
};
//exe hardcode at FF82000:00C76C7C
private static byte[] wm_planetMinimap_vertices = new byte[]
{
0xf8,0xdb,0x08,0xdb,0xe8,0xe2,0xf8,0xe2,0x08,0xe2,0x18,0xe2,0xe0,0xea,0xe8,0xea,0xf8,0xea,0x08,0xea,0x18,0xea,0x20,0xea,0xd8,0xf9,0xe0,0xf9,0xe8,0xf9,0xf8,0xf9,0x08,0xf9,0x18,0xf9,0x20,0xf9,0x28,0xf9,0xd8,0x07,0xe0,0x07,0xe8,0x07,0xf8,0x07,0x08,0x07,0x18,0x07,0x20,0x07,0x28,0x07,0xe0,0x16,0xe8,0x16,0xf8,0x16,0x08,0x16,0x18,0x16,0x20,0x16,0xe8,0x1e,0xf8,0x1e,0x08,0x1e,0x18,0x1e,0xf8,0x25,0x08,0x25
};
//exe hardcode at FF82000:00C76CCC
private static byte[] wm_planetMinimap_uvsOffsets = new byte[]
{
0xfc,0xe0,0x04,0xe0,0xf0,0xe8,0xfc,0xe8,0x04,0xe8,0x10,0xe8,0xe8,0xf0,0xf0,0xf0,0xfc,0xf0,0x04,0xf0,0x10,0xf0,0x18,0xf0,0xe0,0xfc,0xe8,0xfc,0xf0,0xfc,0xfc,0xfc,0x04,0xfc,0x10,0xfc,0x18,0xfc,0x20,0xfc,0xe0,0x04,0xe8,0x04,0xf0,0x04,0xfc,0x04,0x04,0x04,0x10,0x04,0x18,0x04,0x20,0x04,0xe8,0x10,0xf0,0x10,0xfc,0x10,0x04,0x10,0x10,0x10,0x18,0x10,0xf0,0x18,0xfc,0x18,0x04,0x18,0x10,0x18,0xfc,0x20,0x04,0x20,0x00,0x00,0x00,0x00
};
public static void DrawPlanetMiniMap()
{
var vpt = new List<VertexPositionTexture>();
var planetCamPos = new Vector3(2098.347f, 32.68309f, -244.1487f);
var planetCamTarget = new Vector3(2099.964f, 34.26089f, -234.208243f);
//2000,0,0 - target
Module_world_debug.viewMatrix = Matrix.CreateLookAt(planetCamPos, planetCamTarget,
Vector3.Up);
Module_world_debug.effect.View = Module_world_debug.viewMatrix;
Module_world_debug.effect.Projection = Matrix.CreatePerspectiveFieldOfView(
MathHelper.ToRadians(60), Memory.Graphics.GraphicsDevice.Viewport.AspectRatio, 1, 10000f);
for (var i = 0; i < wm_planetMinimap_indicesB_tris.Length; i++) //triangles are ABC, so we can just iterate one-by-one
{
var offsetPointer = wm_planetMinimap_indicesB_tris[i];
if (offsetPointer == 0xFF)
continue;
offsetPointer *= 2;
var vertX = (sbyte)wm_planetMinimap_vertices[offsetPointer];
var vertY = (sbyte)wm_planetMinimap_vertices[offsetPointer + 1];
//uv
short u = (sbyte)wm_planetMinimap_uvsOffsets[offsetPointer];
short v = (sbyte)wm_planetMinimap_uvsOffsets[offsetPointer + 1];
var UVu = Module_world_debug.playerPosition.X / -16384.0f + u / 100.0f;
var UVv = Module_world_debug.playerPosition.Z / -12288.0f + v / 100.0f;
var vec = new Vector3(-vertX + 2000f, -vertY, 0);
vpt.Add(new VertexPositionTexture(vec, new Vector2(UVu, UVv)));
}
for (var i = 0; i < wm_planetMinimap_indicesA_quad.Length; i += 4) //ABD ACD -> we have to retriangulate it
{
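// The quad's vertices are stored in strip order (A B / C D), so it is split
// into triangles (A, B, D) and (A, C, D) when added to the vertex list below.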
Vector3 A = Vector3.Zero, B = Vector3.Zero, C = Vector3.Zero, D = Vector3.Zero;
var uvA = Vector2.Zero; var uvB = Vector2.Zero; var uvC = Vector2.Zero; var uvD = Vector2.Zero;
for (var n = 0; n < 4; n++)
{
var offsetPointer = wm_planetMinimap_indicesA_quad[i + n];
offsetPointer *= 2;
var vertX = (sbyte)wm_planetMinimap_vertices[offsetPointer];
var vertY = (sbyte)wm_planetMinimap_vertices[offsetPointer + 1];
//uv
short u = (sbyte)wm_planetMinimap_uvsOffsets[offsetPointer];
short v = (sbyte)wm_planetMinimap_uvsOffsets[offsetPointer + 1];
var UVu = Module_world_debug.playerPosition.X / -16384.0f + u / 100.0f;
var UVv = Module_world_debug.playerPosition.Z / -12288.0f + v / 100.0f;
var vec = new Vector3(-vertX + 2000f, -vertY, 0);
var vecUV = new Vector2(UVu, UVv);
if (n == 0)
{ A = vec; uvA = vecUV; }
if (n == 1)
{ B = vec; uvB = vecUV; }
if (n == 2)
{ C = vec; uvC = vecUV; }
if (n == 3)
{ D = vec; uvD = vecUV; }
}
vpt.Add(new VertexPositionTexture(A, uvA));
vpt.Add(new VertexPositionTexture(B, uvB));
vpt.Add(new VertexPositionTexture(D, uvD));
vpt.Add(new VertexPositionTexture(A, uvA));
vpt.Add(new VertexPositionTexture(C, uvC));
vpt.Add(new VertexPositionTexture(D, uvD));
}
foreach (var pass in Module_world_debug.effect.CurrentTechnique.Passes)
{
Module_world_debug.effect.Texture = (Texture2D)Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.worldmapMinimap, 0);
pass.Apply();
Module_world_debug.effect.GraphicsDevice.DepthStencilState = DepthStencilState.None;
Memory.Graphics.GraphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleList, vpt.ToArray(), 0, vpt.Count / 3);
}
var src = new Rectangle(Point.Zero, Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.minimapPointer, 0).Size.ToPoint());
Vector2 Scale = Memory.Scale(src.Width, src.Height, ScaleMode.FitBoth);
src.Height = (int)((src.Width * Scale.X) / 30);
src.Width = (int)((src.Height * Scale.Y) / 30);
var dst = new Rectangle(
(int)(Memory.Graphics.GraphicsDevice.Viewport.Width / 1.24f),
(int)((float)Memory.Graphics.GraphicsDevice.Viewport.Height / 1.3f),
src.Width,
src.Height);
//Memory.SpriteBatchStartAlpha(sortMode: SpriteSortMode.BackToFront);
Memory.SpriteBatch.Begin(SpriteSortMode.BackToFront, BlendState.Additive);
Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.minimapPointer, 0).Draw(dst, Color.White * 1f, Module_world_debug.degrees * 6.3f / 360f + 2.5f, Vector2.Zero, SpriteEffects.None, 1f);
Memory.SpriteBatchEnd();
Module_world_debug.effect.GraphicsDevice.DepthStencilState = DepthStencilState.Default;
//restore matrices
Module_world_debug.viewMatrix = Matrix.CreateLookAt(Module_world_debug.camPosition, Module_world_debug.camTarget,
Vector3.Up);
Module_world_debug.effect.View = Module_world_debug.viewMatrix;
}
/// <summary>
/// Draws rectangle mini map
/// </summary>
public static void DrawRectangleMiniMap()
{
var src = new Rectangle(Point.Zero, Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.worldmapMinimap, 1).Size.ToPoint());
Vector2 Scale = Memory.Scale(src.Width, src.Height, ScaleMode.FitBoth);
src.Width = (int)(src.Width * Scale.X);
src.Height = (int)(src.Height * Scale.Y);
src.Height /= 2;
src.Width /= 2;
var dst =
new Rectangle(
Memory.Graphics.GraphicsDevice.Viewport.Width - (src.Width) - 50,
Memory.Graphics.GraphicsDevice.Viewport.Height - (src.Height) - 50,
src.Width,
src.Height);
var bc = Math.Abs(Module_world_debug.camPosition.X / 16384.0f);
var topX = dst.X + (dst.Width * bc);
var bd = Math.Abs(Module_world_debug.camPosition.Z / 12288f);
var topY = dst.Y + (dst.Height * bd);
//Memory.spriteBatch.Begin(SpriteSortMode.BackToFront, Memory.blendState_BasicAdd);
Memory.SpriteBatchStartAlpha(sortMode: SpriteSortMode.BackToFront);
Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.worldmapMinimap, 1)
.Draw(dst, Color.White * .7f);
Memory.SpriteBatch.End();
//Memory.SpriteBatchStartAlpha(sortMode: SpriteSortMode.BackToFront);
Memory.SpriteBatch.Begin(SpriteSortMode.BackToFront, BlendState.Additive);
src = new Rectangle(Point.Zero,
Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.minimapPointer, 0).Size.ToPoint());
Scale = Memory.Scale(src.Width, src.Height, ScaleMode.FitBoth);
src.Height = (int)((src.Width * Scale.X) / 30);
src.Width = (int)((src.Height * Scale.Y) / 30);
var dst2 = new Rectangle(
(int)topX,
(int)topY,
src.Width,
src.Height);
Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.minimapPointer, 0)
.Draw(dst2,
Color.White * 1f,
Module_world_debug.degrees * 6.3f / 360f + 2.5f,
Vector2.Zero,
SpriteEffects.None,
1f);
float localRotation = MathHelper.ToDegrees(
Module_world_debug.worldCharacterInstances[Module_world_debug.currentControllableEntity].localRotation);
if (localRotation < 0) localRotation += 360f;
src = new Rectangle(Point.Zero,
Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.minimapGunbladePointer, 0).Size.ToPoint());
Scale = Memory.Scale(src.Width, src.Height, ScaleMode.FitBoth);
src.Height = (int)((src.Width * Scale.X) / 30);
src.Width = (int)((src.Height * Scale.Y) / 30);
topX = dst2.X;// + (dst2.Width * bc);
topY = dst2.Y;// + (dst2.Height * bd);
dst = new Rectangle(
(int)topX,
(int)topY,
(int)src.Width,
(int)src.Height);
Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.minimapGunbladePointer, 0).Draw(
dst,
Color.White * 1f,
(-localRotation + 90f) * 6.3f / 360f,
new Vector2(8, 8),
SpriteEffects.None, 1f);
Memory.SpriteBatchEnd();
}
private static float fulscrMapCurX = 0.5f;
private static float fulscrMapCurY = 0.4f;
struct fullScreenMapLocations
{
public bool bDraw;
public float x;
public float y;
public FF8String locationName;
}
private static fullScreenMapLocations[] screenMapLocations = new fullScreenMapLocations[19]
{
new fullScreenMapLocations() {x= 0.555f, y= 0.345f}, //BGarden
new fullScreenMapLocations() {x= 0.530f, y= 0.370f}, //BCity
new fullScreenMapLocations() {x= 0.465f, y= 0.315f}, //Dollet
new fullScreenMapLocations() {x= 0.449f, y= 0.455f}, //Timber
new fullScreenMapLocations() {x= 0.410f, y= 0.370f}, //GGarden
new fullScreenMapLocations() {x= 0.360f, y= 0.350f}, //Deling
new fullScreenMapLocations() {x= 0.375f, y= 0.450f}, //<NAME>
new fullScreenMapLocations() {x= 0.340f, y= 0.415f}, //Missile Base
new fullScreenMapLocations() {x= 0.542f, y= 0.475f}, //Fisherman
new fullScreenMapLocations() {x= 0.385f, y= 0.510f}, //Winhill
new fullScreenMapLocations() {x= 0.430f, y= 0.760f}, //Edea House
new fullScreenMapLocations() {x= 0.611f, y= 0.240f}, //Trabia
new fullScreenMapLocations() {x= 0.525f, y= 0.14f}, //Shumi
new fullScreenMapLocations() {x= 0.640f, y= 0.450f}, //Esthar city
new fullScreenMapLocations() {x= 0.620f, y= 0.485f}, //Esthar airstation
new fullScreenMapLocations() {x= 0.687f, y= 0.460f}, //Lunatic pandora
new fullScreenMapLocations() {x= 0.715f, y= 0.51f}, //Lunar gate
new fullScreenMapLocations() {x= 0.675f, y= 0.535f}, //Esthar memorial
new fullScreenMapLocations() {x= 0.698f, y= 0.590f}, //Tear point
};
//0.145 - 0.745 X
//0.070 - 0.870 Y
private static bool bFullScreenMapInitialize = true;
private static Texture2D fullScreenMapMark;
public static void DrawFullScreenMap()
{
if (bFullScreenMapInitialize)
{
screenMapLocations[0].locationName = Module_world_debug.wmset.GetLocationName(0); //BalambGarden
screenMapLocations[0].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[1].locationName = Module_world_debug.wmset.GetLocationName(1); //Balamb City
screenMapLocations[1].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[2].locationName = Module_world_debug.wmset.GetLocationName(3); //Dollet
screenMapLocations[2].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[3].locationName = Module_world_debug.wmset.GetLocationName(4); //Timber
screenMapLocations[3].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[4].locationName = Module_world_debug.wmset.GetLocationName(6); //Galbadia Garden
screenMapLocations[4].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[5].locationName = Module_world_debug.wmset.GetLocationName(8); //Deling
screenMapLocations[5].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[6].locationName = Module_world_debug.wmset.GetLocationName(10); //Desert Prison
screenMapLocations[6].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[7].locationName = Module_world_debug.wmset.GetLocationName(11); //Missile Base
screenMapLocations[7].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[8].locationName = Module_world_debug.wmset.GetLocationName(13); //Fisherman
screenMapLocations[8].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[9].locationName = Module_world_debug.wmset.GetLocationName(15); //Winhill
screenMapLocations[9].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[10].locationName = Module_world_debug.wmset.GetLocationName(18); //Edea house
screenMapLocations[10].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[11].locationName = Module_world_debug.wmset.GetLocationName(19); //Trabia
screenMapLocations[11].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[12].locationName = Module_world_debug.wmset.GetLocationName(20); //shumi
screenMapLocations[12].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[13].locationName = Module_world_debug.wmset.GetLocationName(26); //Esthar city
screenMapLocations[13].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[14].locationName = Module_world_debug.wmset.GetLocationName(27); //Esthar airstation
screenMapLocations[14].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[15].locationName = Module_world_debug.wmset.GetLocationName(28); //Lunatic pandora lab
screenMapLocations[15].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[16].locationName = Module_world_debug.wmset.GetLocationName(29); //lunar gate
screenMapLocations[16].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[17].locationName = Module_world_debug.wmset.GetLocationName(30); //esthar memorial
screenMapLocations[17].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
screenMapLocations[18].locationName = Module_world_debug.wmset.GetLocationName(31); //tear point
screenMapLocations[18].bDraw = true; //[TODO] SAVEGAME PARSE FLAGS
fullScreenMapMark = new Texture2D(Memory.Graphics.GraphicsDevice, 2, 1, false, SurfaceFormat.Color); //1x1 yellow and 1x1 red
fullScreenMapMark.SetData(new Color[] { Color.Yellow, Color.Red });
bFullScreenMapInitialize = false;
}
//Draw full-screen minimap
Memory.Graphics.GraphicsDevice.Clear(Color.Black);
Memory.SpriteBatchStartStencil();
var texture = Module_world_debug.wmset.GetWorldMapTexture(Wmset.Section38_textures.worldmapMinimap, 0);
var width = Memory.Graphics.GraphicsDevice.Viewport.Width;
var height = Memory.Graphics.GraphicsDevice.Viewport.Height;
texture.Draw(new Rectangle((int)(width * 0.2f), (int)(height * 0.08f),
(int)(width * 0.6), (int)(height * 0.8)), Color.White * 1f);
Memory.SpriteBatchEnd();
Memory.SpriteBatchStartAlpha();
//draw locations
for (var i = 0; i < screenMapLocations.Length; i++)
{
if (!screenMapLocations[i].bDraw)
continue;
Memory.SpriteBatch.Draw(fullScreenMapMark, new Rectangle((int)(width * screenMapLocations[i].x),
(int)(height * screenMapLocations[i].y), 8, 8),
new Rectangle(0, 0, 1, 1), Color.White);
}
//draw vehicles
//[TODO]
//draw location names
//[TODO]
for (var i = 0; i < screenMapLocations.Length; i++)
{
var xDistance = Math.Abs((fulscrMapCurX + 0.05f) - screenMapLocations[i].x);
var yDistance = Math.Abs((fulscrMapCurY + 0.005f) - screenMapLocations[i].y);
if (xDistance < 0.015 && yDistance < 0.015)
Memory.Font.RenderBasicText(screenMapLocations[i].locationName,
new Vector2(width * 0.7f, height * 0.9f), new Vector2(2, 2), Font.Type.sysFntBig);
}
Memory.SpriteBatchEnd();
//Finally draw cursor
Memory.SpriteBatchStartAlpha();
Memory.Icons.Draw(Icons.ID.Finger_Right, 2,
new Rectangle((int)(fulscrMapCurX * width), (int)(fulscrMapCurY * height), 0, 0), Vector2.One * 3);
Memory.SpriteBatchEnd();
}
internal static void Input()
{
if (Input2.Button(FF8TextTagKey.Up)/* || shift.Y > 0*/)
{
if (fulscrMapCurY < 0.070f)
fulscrMapCurY = 0.870f;
fulscrMapCurY -= 0.005f * (float)Memory.ElapsedGameTime.TotalMilliseconds / 25.0f;
}
else if (Input2.Button(FF8TextTagKey.Down)/* || shift.Y < 0*/)
{
if (fulscrMapCurY > 0.870f)
fulscrMapCurY = 0.070f;
fulscrMapCurY += 0.005f * (float)Memory.ElapsedGameTime.TotalMilliseconds / 25.0f;
}
if (Input2.Button(FF8TextTagKey.Left) /*|| shift.X < 0*/)
{
if (fulscrMapCurX < 0.145f)
fulscrMapCurX = 0.745f;
fulscrMapCurX -= 0.003f * (float)Memory.ElapsedGameTime.TotalMilliseconds / 25.0f;
}
else if (Input2.Button(FF8TextTagKey.Right)/* || shift.X > 0*/)
{
if (fulscrMapCurX > 0.745)
fulscrMapCurX = 0.145f;
fulscrMapCurX += 0.003f * (float)Memory.ElapsedGameTime.TotalMilliseconds / 25.0f;
}
}
internal static void imgui()
{
ImGuiNET.ImGui.InputFloat("X: ", ref fulscrMapCurX); //0.145 - 0.745
ImGuiNET.ImGui.InputFloat("Y: ", ref fulscrMapCurY); //0.070 - 0.870
for (var i = 0; i < screenMapLocations.Length; i++)
{
ImGuiNET.ImGui.InputFloat($"lA{i}", ref screenMapLocations[i].x);
ImGuiNET.ImGui.SameLine();
ImGuiNET.ImGui.InputFloat($"lB{i}", ref screenMapLocations[i].y);
}
}
}
}
<|start_filename|>Core/Field/ONE/FieldCharaOne.cs<|end_filename|>
using Microsoft.Xna.Framework.Graphics;
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
namespace OpenVIII.Fields
{
//this is static, because it's always alive
public static class FieldMainCharaOne
{
#region Fields
public static bool BAlreadyInitialized;
public static MainFieldChara[] MainFieldCharacters;
#endregion Fields
#region Methods
public static void Init(bool bForce = false)
{
if (BAlreadyInitialized && !bForce)
return;
var mfc = new List<MainFieldChara>();
var aw = ArchiveWorker.Load(Memory.Archives.A_FIELD);
var test = aw.GetListOfFiles();
var collectionEntry = test.Where(x => x.ToLower().Contains("main_chr")).ToList().AsReadOnly();
if (!collectionEntry.Any()) return;
var fieldArchiveName = collectionEntry.First();
var fieldArchive = aw.GetArchive(fieldArchiveName);
var test2 = fieldArchive.GetListOfFiles();
foreach (var fieldArchiveFileName in test2)
{
//if (test_[i].Contains("d008.mch"))
// continue;
if (string.IsNullOrWhiteSpace(fieldArchiveFileName))
continue;
var oneBytes = fieldArchive.GetBinaryFile(fieldArchiveFileName);
if (oneBytes.Length < 64) //Hello Kazuo Suzuki! I will skip your dummy files
continue;
var currentLocalChara = ReadMainChara(oneBytes);
// Have to accommodate both Linux and Windows paths
var dotIndex = fieldArchiveFileName.LastIndexOf(".");
var localId = int.Parse(fieldArchiveFileName.Substring(dotIndex - 3, 3));
currentLocalChara.ID = localId;
mfc.Add(currentLocalChara);
}
MainFieldCharacters = mfc.ToArray();
BAlreadyInitialized = true;
}
private static MainFieldChara ReadMainChara(byte[] oneBytes)
{
var localMfc = new MainFieldChara();
using (var ms = new MemoryStream(oneBytes))
using (var br = new BinaryReader(ms))
{
var timOffsets = new List<uint>();
uint timOffset;
while ((timOffset = br.ReadUInt32()) != 0xffffffff)
timOffsets.Add(timOffset & 0x00FFFFFF);
var modelPointer = br.ReadUInt32();
//read textures
var texture2DReader = timOffsets
.Select(x => new TIM2(br, x))
.Select(x => x.GetTexture()).ToList()
.AsReadOnly();
localMfc.Textures = texture2DReader.ToArray();
//read models
ms.Seek(modelPointer, SeekOrigin.Begin);
var mch = new Debug_MCH(ms, br, Debug_MCH.mchMode.FieldMain);
localMfc.MCH = mch;
}
return localMfc;
}
#endregion Methods
#region Structs
public struct MainFieldChara
{
#region Fields
public int ID;
public Debug_MCH MCH;
public Texture2D[] Textures;
#endregion Fields
}
#endregion Structs
}
internal class FieldCharaOne
{
#region Fields
public CharaModelHeaders[] FieldModels;
[SuppressMessage("ReSharper", "NotAccessedField.Local")] private readonly int _fieldId;
#endregion Fields
#region Constructors
public FieldCharaOne(int fieldId)
{
_fieldId = fieldId;
if (!FieldMainCharaOne.BAlreadyInitialized)
FieldMainCharaOne.Init();
var aw = ArchiveWorker.Load(Memory.Archives.A_FIELD);
var test = aw.GetListOfFiles();
var collectionEntry = test.Where(x => x.IndexOf(Memory.FieldHolder.Fields[Memory.FieldHolder.FieldID], StringComparison.OrdinalIgnoreCase) >= 0).ToList().AsReadOnly();
if (!collectionEntry.Any()) return;
var fieldArchiveName = collectionEntry.First();
var fieldArchive = aw.GetArchive(fieldArchiveName);
var test2 = fieldArchive.GetListOfFiles();
string one;
//string main_chr;
try
{
one = test2.First(x => x.EndsWith(".one", StringComparison.OrdinalIgnoreCase));
}
catch
{
return;
}
var oneBytes = fieldArchive.GetBinaryFile(one);
if (oneBytes.Length == 0)
return;
ReadBuffer(oneBytes);
}
#endregion Constructors
#region Methods
private void ReadBuffer(byte[] oneBuffer)
{
var cmh = new List<CharaModelHeaders>();
using (var ms = new MemoryStream(oneBuffer))
using (var br = new BinaryReader(ms))
{
var nModels = br.ReadUInt32();
for (var i = 0; i < nModels; i++)
{
var localCmh = new CharaModelHeaders
{
Offset = br.ReadUInt32() + 4,
Size = br.ReadUInt32(),
Size2 = br.ReadUInt32(),
FlagDword = br.ReadUInt32()
};
var bIgnorePadding = false;
var bMainChara = false;
if (localCmh.FlagDword >> 24 == 0xD0) //main character file
{
localCmh.TIMOffset = new uint[0];
localCmh.ModelDataOffset = 0xFFFFFFFF;
ms.Seek(4, SeekOrigin.Current);
bMainChara = true;
}
else if (localCmh.FlagDword >> 24 == 0xa0) //unknown- object without texture/ placeholder?
{
localCmh.TIMOffset = new uint[0];
localCmh.ModelDataOffset = 0xFFFFFFFF;
ms.Seek(8, SeekOrigin.Current);
bIgnorePadding = true;
}
else
{
var timOffsets = new List<uint>();
// ReSharper disable once UnusedVariable
var flagTimOffset = localCmh.FlagDword & 0x00FFFFFF;
timOffsets.Add(localCmh.FlagDword << 8);
uint localTimOffset;
while ((localTimOffset = br.ReadUInt32()) != 0xFFFFFFFF)
timOffsets.Add(localTimOffset & 0x00FFFFFF);
localCmh.TIMOffset = timOffsets.ToArray();
localCmh.ModelDataOffset = br.ReadUInt32();
}
localCmh.ModelName = br.ReadChars(8);
localCmh.Padding = br.ReadUInt32();
if (localCmh.Padding != 0xEEEEEEEE && !bIgnorePadding) //padding for null placeholder models is 2 rather than 0xEEEEEEEE
throw new Exception("Chara one- padding was not 0xEEEEEEEE- check code for ReadBuffer in FieldCharaOne");
int lastMsPosition;
if (localCmh.ModelDataOffset != 0xFFFFFFFF)
{
lastMsPosition = (int)ms.Position;
ms.Seek(localCmh.Offset + localCmh.ModelDataOffset, SeekOrigin.Begin);
localCmh.MCH = new Debug_MCH(ms, br, Debug_MCH.mchMode.FieldNPC, 3f);
//ms.Seek(localCmh.offset + 4, SeekOrigin.Begin);
var texList = new List<Texture2D>();
for (var n = 0; n < localCmh.TIMOffset.Length; n++)
{
if (localCmh.TIMOffset[n] > 0x10000000)
localCmh.TIMOffset[n] = localCmh.TIMOffset[n] & 0x00FFFFFF;
var localTim = new TIM2(br, localCmh.Offset + localCmh.TIMOffset[n]);
texList.Add(localTim.GetTexture());
}
localCmh.Textures = texList.ToArray();
ms.Seek(lastMsPosition, SeekOrigin.Begin);
}
else if (bMainChara)
{
lastMsPosition = (int)ms.Position;
//this is main chara, so please grab data from main_chr.fs
var getRefId = int.Parse(new string(localCmh.ModelName).Substring(1, 3));
var chara = FieldMainCharaOne.MainFieldCharacters.First(x => x.ID == getRefId);
localCmh.ModelDataOffset = 1;
localCmh.MCH = chara.MCH;
localCmh.Textures = chara.Textures;
ms.Seek(localCmh.Offset, SeekOrigin.Begin);
localCmh.MCH.MergeAnimations(ms, br);
ms.Seek(lastMsPosition, SeekOrigin.Begin);
}
cmh.Add(localCmh);
}
}
FieldModels = cmh.ToArray();
}
#endregion Methods
#region Structs
public struct CharaModelHeaders
{
#region Fields
///this is either tim or indicator of main model
public uint FlagDword;
public Debug_MCH MCH;
public uint ModelDataOffset;
///8
public char[] ModelName;
///points to texture
public uint Offset;
///0xEEEEEEEE
public uint Padding;
///size of whole segment
public uint Size;
///as above
public uint Size2;
public Texture2D[] Textures;
///pointer to zero
public uint[] TIMOffset;
#endregion Fields
}
#endregion Structs
}
}
<|start_filename|>Testing/Testing/MIDITest.cs<|end_filename|>
using System;
using System.Security.Cryptography;
using NUnit.Framework;
namespace OpenVIII.Tests
{
using FFmpeg.AutoGen;
[TestFixture]
public class MIDITest
{
#region Methods
[Test]
public void LoadingTest()
{
AV.Midi.Fluid fluidMidi = loadMidiData();
fluidMidi.FluidWorker_ProduceMid();
SHA256 mySHA256 = SHA256.Create();
string actual = byteArrayToString(mySHA256.ComputeHash(fluidMidi.midBuffer));
Assert.AreEqual("9e3ab13fc48c813864fbe8904594c2abfb576364453456d792d788ee3f5e8f99", actual);
}
[Test]
public void ProcessTest()
{
AV.Midi.Fluid fluidMidi = loadMidiData();
Core.AV.Music.Midi.MidiProcessor midiProcessor = new Core.AV.Music.Midi.MidiProcessor(AV.Midi.Fluid.lbinbins, AV.Midi.Fluid.tetr, AV.Midi.Fluid.seqt, AV.Midi.Fluid.tims);
// It seems that none of the exported MIDIs have an end track?
// Which makes sense because the music is supposed to loop.
// But why wasn't the assertion triggered before?
var mid = midiProcessor.Process();
}
// Display the byte array in a readable format.
// Adapted from https://docs.microsoft.com/en-us/dotnet/api/system.security.cryptography.sha256?view=netcore-3.1
private static string byteArrayToString(byte[] array)
{
string hash = "";
for (int i = 0; i < array.Length; i++)
{
hash += $"{array[i]:x2}";
}
return hash;
}
private AV.Midi.Fluid loadMidiData()
{
Memory.Init(null, null, null, null);
AV.Music.Init();
// Load the overture MIDI.
var filename = Memory.DicMusic[(MusicId)79][0];
AV.Midi.Fluid fluidMidi = new AV.Midi.Fluid();
fluidMidi.ReadSegmentFileManually(filename);
return fluidMidi;
}
#endregion Methods
}
}
| RJesusTVD/OpenVIII-monogame |
<|start_filename|>kstream/k_table.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package kstream
//import (
// "github.com/tryfix/kstream/encoding"
// "github.com/tryfix/kstream/logger"
// "github.com/tryfix/kstream/kstream/store"
//)
//
//type KTable struct {
// kStream
// store store.Store
//}
//
//func NewKTable(topic string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...Option) Stream {
// if keyEncoder == nil {
// logger.DefaultLogger.Fatal(`k-stream.kStream`, `keyEncoder cannot be null`)
// }
//
// if valEncoder == nil {
// logger.DefaultLogger.Fatal(`k-stream.kStream`, `valEncoder cannot be null`)
// }
//
// return newKStream(func(s string) string { return topic }, keyEncoder, valEncoder, nil, options...)
//}
<|start_filename|>kstream/store/indexed_store.go<|end_filename|>
package store
import (
"context"
nativeErrors "errors"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/encoding"
//goEncoding "encoding"
"sync"
"time"
)
type Index interface {
String() string
Write(key, value interface{}) error
WriteHash(hash, key interface{}) error
Hash(key, val interface{}) (hash interface{})
Delete(key, value interface{}) error
Read(index interface{}) ([]interface{}, error)
Keys() []interface{}
Values() map[interface{}][]interface{}
ValueIndexed(index, value interface{}) (bool, error)
}
type IndexedStore interface {
Store
GetIndex(ctx context.Context, name string) (Index, error)
Indexes() []Index
GetIndexedRecords(ctx context.Context, index string, key interface{}) ([]interface{}, error)
}
type indexedStore struct {
Store
indexes map[string]Index
mu *sync.Mutex
}
func NewIndexedStore(name string, keyEncoder, valEncoder encoding.Encoder, indexes []Index, options ...Options) (IndexedStore, error) {
store, err := NewStore(name, keyEncoder, valEncoder, options...)
if err != nil {
return nil, err
}
idxs := make(map[string]Index)
for _, idx := range indexes {
idxs[idx.String()] = idx
}
return &indexedStore{
Store: store,
indexes: idxs,
mu: new(sync.Mutex),
}, nil
}
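// Illustrative usage sketch (assumptions: `nameIndex` is some Index
// implementation and `enc` an encoding.Encoder, neither defined here):
//
//	st, err := NewIndexedStore(`users`, enc, enc, []Index{nameIndex})
//	_ = st.Set(ctx, `u1`, user, 0) // writes the record and updates every index
//	recs, _ := st.GetIndexedRecords(ctx, nameIndex.String(), `foo`)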
func (i *indexedStore) Set(ctx context.Context, key, val interface{}, expiry time.Duration) error {
// set indexes
i.mu.Lock()
defer i.mu.Unlock()
for _, index := range i.indexes {
// get the previous value for the indexed key
valPrv, err := i.Get(ctx, key)
if err != nil {
return err
}
// if previous exists and different from current value
// eg: val.name=foo -> val.name=bar then find index for foo and delete
if valPrv != nil {
hash := index.Hash(key, valPrv)
// check if value already indexed
indexed, err := index.ValueIndexed(hash, key)
if err != nil {
return err
}
// if already indexed remove from previous index
if indexed {
if err := index.Delete(key, valPrv); err != nil {
return err
}
}
}
if err := index.Write(key, val); err != nil {
return errors.WithPrevious(err, `index write failed`)
}
}
return i.Store.Set(ctx, key, val, expiry)
}
func (i *indexedStore) Delete(ctx context.Context, key interface{}) error {
// delete indexes
val, err := i.Store.Get(ctx, key)
if err != nil {
return err
}
i.mu.Lock()
defer i.mu.Unlock()
if val != nil {
for _, index := range i.indexes {
if err := index.Delete(key, val); err != nil {
return err
}
}
}
return i.Store.Delete(ctx, key)
}
func (i *indexedStore) GetIndex(_ context.Context, name string) (Index, error) {
i.mu.Lock()
defer i.mu.Unlock()
index, ok := i.indexes[name]
if !ok {
return nil, fmt.Errorf(`index [%s] does not exist`, name)
}
return index, nil
}
func (i *indexedStore) Indexes() []Index {
i.mu.Lock()
defer i.mu.Unlock()
var idxs []Index
for _, idx := range i.indexes {
idxs = append(idxs, idx)
}
return idxs
}
func (i *indexedStore) GetIndexedRecords(ctx context.Context, index string, key interface{}) ([]interface{}, error) {
i.mu.Lock()
idx, ok := i.indexes[index]
i.mu.Unlock()
if !ok {
return nil, fmt.Errorf(`index [%s] does not exist`, index)
}
var records []interface{}
indexes, err := idx.Read(key)
if err != nil {
if nativeErrors.Is(err, UnknownIndex) {
return records, nil
}
return nil, err
}
for _, index := range indexes {
record, err := i.Get(ctx, index)
if err != nil {
return nil, err
}
records = append(records, record)
}
return records, nil
}
<|start_filename|>kstream/store/http.go<|end_filename|>
package store
import (
"context"
"encoding/json"
"errors"
"fmt"
"github.com/gorilla/handlers"
"github.com/gorilla/mux"
"github.com/tryfix/log"
"net/http"
)
type Err struct {
Err string `json:"error"`
}
type handler struct {
logger log.Logger
}
func (h *handler) encodeStores(w http.ResponseWriter, stores []Store) error {
var list []string
for _, stor := range stores {
list = append(list, stor.Name())
}
return json.NewEncoder(w).Encode(list)
}
func (h *handler) encodeIndexes(w http.ResponseWriter, indexes []Index) error {
var list []string
for _, stor := range indexes {
list = append(list, stor.String())
}
return json.NewEncoder(w).Encode(list)
}
func (h *handler) encodeAll(w http.ResponseWriter, i Iterator) error {
var keyVals []struct {
Key interface{} `json:"key"`
Value interface{} `json:"value"`
}
for i.Valid() {
keyVal := struct {
Key interface{} `json:"key"`
Value interface{} `json:"value"`
}{}
k, err := i.Key()
if err != nil {
h.logger.Error(err)
i.Next()
continue
}
v, err := i.Value()
if err != nil {
h.logger.Error(err)
i.Next()
continue
}
keyVal.Key = k
keyVal.Value = v
keyVals = append(keyVals, keyVal)
i.Next()
}
return json.NewEncoder(w).Encode(keyVals)
}
func (h *handler) encodeItem(w http.ResponseWriter, k interface{}, v interface{}) error {
keyVal := struct {
Key interface{} `json:"key"`
Value interface{} `json:"value"`
}{}
keyVal.Key = k
keyVal.Value = v
return json.NewEncoder(w).Encode(keyVal)
}
func (h *handler) encodeError(e error) []byte {
byt, err := json.Marshal(Err{
Err: e.Error(),
})
if err != nil {
h.logger.Error(err)
}
return byt
}
func (h *handler) storeExist(store string, registry Registry) bool {
for _, s := range registry.Stores() {
if s.Name() == store {
return true
}
}
return false
}
func (h *handler) indexExist(index string, registry Registry) bool {
for _, idx := range registry.Indexes() {
if idx.String() == index {
return true
}
}
return false
}
func MakeEndpoints(host string, registry Registry, logger log.Logger) {
r := mux.NewRouter()
h := handler{
logger: logger,
}
r.HandleFunc(`/stores`, func(writer http.ResponseWriter, request *http.Request) {
writer.Header().Set("Content-Type", "application/json")
writer.Header().Set("Access-Control-Allow-Origin", "*")
err := h.encodeStores(writer, registry.Stores())
if err != nil {
if _, err := writer.Write(h.encodeError(err)); err != nil {
logger.Error(err)
}
}
}).Methods(http.MethodGet)
r.HandleFunc(`/indexes`, func(writer http.ResponseWriter, request *http.Request) {
writer.Header().Set("Content-Type", "application/json")
writer.Header().Set("Access-Control-Allow-Origin", "*")
err := h.encodeIndexes(writer, registry.Indexes())
if err != nil {
if _, err := writer.Write(h.encodeError(err)); err != nil {
logger.Error(err)
}
}
}).Methods(http.MethodGet)
r.HandleFunc(`/stores/{store}`, func(writer http.ResponseWriter, request *http.Request) {
writer.Header().Set("Content-Type", "application/json")
writer.Header().Set("Access-Control-Allow-Origin", "*")
vars := mux.Vars(request)
store, ok := vars[`store`]
if !ok {
logger.Error(`unknown route parameter`)
return
}
if !h.storeExist(store, registry) {
res := h.encodeError(errors.New(`store does not exist`))
if _, err := writer.Write(res); err != nil {
logger.Error(err)
}
return
}
stor, err := registry.Store(store)
if err != nil {
res := h.encodeError(err)
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
i, err := stor.GetAll(context.Background())
if err != nil {
res := h.encodeError(err)
if _, err := writer.Write(res); err != nil {
logger.Error(err)
}
return
}
err = h.encodeAll(writer, i)
if err != nil {
logger.Error(err)
}
}).Methods(http.MethodGet)
r.HandleFunc(`/stores/{store}/indexes`, func(writer http.ResponseWriter, request *http.Request) {
writer.Header().Set("Content-Type", "application/json")
writer.Header().Set("Access-Control-Allow-Origin", "*")
vars := mux.Vars(request)
storeName, ok := vars[`store`]
if !ok {
res := h.encodeError(errors.New(`unknown route parameter`))
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
if !h.storeExist(storeName, registry) {
res := h.encodeError(errors.New(`store does not exist`))
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
store, err := registry.Store(storeName)
if err != nil {
res := h.encodeError(err)
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
indexdStore, ok := store.(IndexedStore)
if !ok {
res := h.encodeError(errors.New(`not an IndexedStore`))
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
keys := indexdStore.Indexes()
var indexes []string
for _, key := range keys {
indexes = append(indexes, key.String())
}
err = json.NewEncoder(writer).Encode(indexes)
if err != nil {
logger.Error(err)
}
}).Methods(http.MethodGet)
r.HandleFunc(`/stores/{store}/indexes/{index}`, func(writer http.ResponseWriter, request *http.Request) {
writer.Header().Set("Content-Type", "application/json")
writer.Header().Set("Access-Control-Allow-Origin", "*")
vars := mux.Vars(request)
storeName, ok := vars[`store`]
if !ok {
logger.Error(`unknown route parameter`)
return
}
indexName, ok := vars[`index`]
if !ok {
logger.Error(`unknown route parameter`)
return
}
if !h.storeExist(storeName, registry) {
res := h.encodeError(errors.New(`store does not exist`))
if _, err := writer.Write(res); err != nil {
logger.Error(err)
}
return
}
if !h.indexExist(indexName, registry) {
res := h.encodeError(errors.New(`index does not exist`))
if _, err := writer.Write(res); err != nil {
logger.Error(err)
}
return
}
store, err := registry.Store(storeName)
if err != nil {
res := h.encodeError(err)
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
indexdStore, ok := store.(IndexedStore)
if !ok {
res := h.encodeError(errors.New(`not an IndexedStore`))
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
idx, err := indexdStore.GetIndex(request.Context(), indexName)
if err != nil {
res := h.encodeError(err)
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
encoded := make(map[string][]interface{})
for key, values := range idx.Values() {
byt, err := json.Marshal(key)
if err != nil {
h.logger.Error(err)
continue
}
encoded[string(byt)] = values
}
err = json.NewEncoder(writer).Encode(encoded)
if err != nil {
logger.Error(err)
}
}).Methods(http.MethodGet)
r.HandleFunc(`/stores/{store}/{key}`, func(writer http.ResponseWriter, request *http.Request) {
writer.Header().Set("Content-Type", "application/json")
vars := mux.Vars(request)
store, ok := vars[`store`]
if !ok {
logger.Error(`unknown route parameter`)
return
}
if !h.storeExist(store, registry) {
res := h.encodeError(errors.New(`store does not exist`))
if _, err := writer.Write(res); err != nil {
logger.Error(err)
}
return
}
key, ok := vars[`key`]
if !ok {
logger.Error(`unknown route parameter`)
return
}
keyByte := []byte(key)
stor, err := registry.Store(store)
if err != nil {
res := h.encodeError(err)
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
return
}
decodedKey, err := stor.KeyEncoder().Decode(keyByte)
//@FIXME
//keyInt, err := strconv.Atoi(key)
if err != nil {
res := h.encodeError(err)
if _, err := writer.Write(res); err != nil {
logger.Error(err)
}
return
}
data, err := stor.Get(context.Background(), decodedKey)
if err != nil {
res := h.encodeError(err)
if _, err := writer.Write(res); err != nil {
logger.Error(err)
return
}
}
err = h.encodeItem(writer, key, data)
if err != nil {
logger.Error(err)
}
}).Methods(http.MethodGet)
go func() {
err := http.ListenAndServe(host, handlers.CORS()(r))
if err != nil {
logger.Error(`k-stream.Store.Http`,
fmt.Sprintf(`Cannot start web server : %+v`, err))
}
}()
logger.Info(fmt.Sprintf(`Http server started on %s`, host))
}
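// Illustrative usage sketch (assumes a populated Registry and a log.Logger):
//
//	store.MakeEndpoints(`:8080`, registry, lgr)
//	// GET /stores               -> store names
//	// GET /stores/{store}       -> all key/value pairs in the store
//	// GET /stores/{store}/{key} -> a single record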
<|start_filename|>producer/producer.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package producer
import (
"context"
"fmt"
"github.com/Shopify/sarama"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/data"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"time"
)
type Builder func(configs *Config) (Producer, error)
type RequiredAcks int
const (
// NoResponse doesn't send any response, the TCP ACK is all you get.
NoResponse RequiredAcks = 0
// WaitForLeader waits for only the local commit to succeed before responding.
WaitForLeader RequiredAcks = 1
// WaitForAll waits for all in-sync replicas to commit before responding.
// The minimum number of in-sync replicas is configured on the broker via
// the `min.insync.replicas` configuration key.
WaitForAll RequiredAcks = -1
)
func (ack RequiredAcks) String() string {
switch ack {
case WaitForLeader:
return `WaitForLeader`
case WaitForAll:
return `WaitForAll`
default:
return `NoResponse`
}
}
type Partitioner int
const (
HashBased Partitioner = iota
Manual
Random
)
type Producer interface {
Produce(ctx context.Context, message *data.Record) (partition int32, offset int64, err error)
ProduceBatch(ctx context.Context, messages []*data.Record) error
Close() error
}
type saramaProducer struct {
id string
config *Config
saramaProducer sarama.SyncProducer
logger log.Logger
metrics *metricsReporter
}
type metricsReporter struct {
produceLatency metrics.Observer
batchProduceLatency metrics.Observer
}
func NewProducer(configs *Config) (Producer, error) {
if err := configs.validate(); err != nil {
return nil, err
}
configs.Logger.Info(`saramaProducer [` + configs.Id + `] initiating...`)
prd, err := sarama.NewSyncProducer(configs.BootstrapServers, configs.Config)
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`[%s] init failed`, configs.Id))
}
defer configs.Logger.Info(`saramaProducer [` + configs.Id + `] initiated`)
labels := []string{`topic`, `partition`}
return &saramaProducer{
id: configs.Id,
config: configs,
saramaProducer: prd,
logger: configs.Logger,
metrics: &metricsReporter{
produceLatency: configs.MetricsReporter.Observer(metrics.MetricConf{
Path: `k_stream_producer_produced_latency_microseconds`,
Labels: labels,
ConstLabels: map[string]string{`producer_id`: configs.Id},
}),
batchProduceLatency: configs.MetricsReporter.Observer(metrics.MetricConf{
Path: `k_stream_producer_batch_produced_latency_microseconds`,
Labels: append(labels, `size`),
ConstLabels: map[string]string{`producer_id`: configs.Id},
}),
},
}, nil
}
func (p *saramaProducer) Close() error {
defer p.logger.Info(fmt.Sprintf(`saramaProducer [%s] closed`, p.id))
return p.saramaProducer.Close()
}
func (p *saramaProducer) Produce(ctx context.Context, message *data.Record) (partition int32, offset int64, err error) {
t := time.Now()
m := &sarama.ProducerMessage{
Topic: message.Topic,
Key: sarama.ByteEncoder(message.Key),
Value: sarama.ByteEncoder(message.Value),
Timestamp: t,
}
for _, header := range message.Headers.All() {
m.Headers = append(m.Headers, *header)
}
if !message.Timestamp.IsZero() {
m.Timestamp = message.Timestamp
}
if message.Partition > 0 {
m.Partition = message.Partition
}
pr, o, err := p.saramaProducer.SendMessage(m)
if err != nil {
return 0, 0, errors.WithPrevious(err, `cannot send message`)
}
p.metrics.produceLatency.Observe(float64(time.Since(t).Nanoseconds()/1e3), map[string]string{
`topic`: message.Topic,
`partition`: fmt.Sprint(pr),
})
p.logger.TraceContext(ctx, fmt.Sprintf("Delivered message to topic %s [%d] at offset %d",
message.Topic, pr, o))
return pr, o, nil
}
func (p *saramaProducer) ProduceBatch(ctx context.Context, messages []*data.Record) error {
t := time.Now()
saramaMessages := make([]*sarama.ProducerMessage, 0, len(messages))
for _, message := range messages {
m := &sarama.ProducerMessage{
Topic: message.Topic,
Key: sarama.ByteEncoder(message.Key),
Value: sarama.ByteEncoder(message.Value),
Timestamp: t,
}
for _, header := range message.Headers.All() {
m.Headers = append(m.Headers, *header)
}
if !message.Timestamp.IsZero() {
m.Timestamp = message.Timestamp
}
if message.Partition > 0 {
m.Partition = message.Partition
}
saramaMessages = append(saramaMessages, m)
}
err := p.saramaProducer.SendMessages(saramaMessages)
if err != nil {
return errors.WithPrevious(err, `cannot produce batch`)
}
partition := fmt.Sprint(messages[0].Partition)
p.metrics.batchProduceLatency.Observe(float64(time.Since(t).Nanoseconds()/1e3), map[string]string{
`topic`: messages[0].Topic,
`partition`: partition,
`size`: fmt.Sprint(len(messages)),
})
p.logger.TraceContext(ctx, fmt.Sprintf("message bulk delivered %s[%s]", messages[0].Topic, partition))
return nil
}
<|start_filename|>examples/example_2/domain/variables.go<|end_filename|>
package domain
const (
ABCTopic = `common.abc`
)
<|start_filename|>kstream/processor_pool.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package kstream
import (
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/kstream/changelog"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"sync"
)
type processorPool struct {
id string
processors map[consumer.TopicPartition]*processor
mu *sync.Mutex
topologies map[string]*kStream
logger log.Logger
metrics metrics.Reporter
changelog changelog.Builder
}
func newProcessorPool(id string, flows map[string]*kStream, changelog changelog.Builder, logger log.Logger, reporter metrics.Reporter) *processorPool {
return &processorPool{
id: id,
processors: make(map[consumer.TopicPartition]*processor),
mu: &sync.Mutex{},
topologies: flows,
logger: logger,
metrics: reporter,
changelog: changelog,
}
}
func (p *processorPool) Processor(tp consumer.TopicPartition) *processor {
p.mu.Lock()
defer p.mu.Unlock()
return p.processors[tp]
}
func (p *processorPool) addProcessor(tp consumer.TopicPartition) error {
processorId := fmt.Sprintf(`%s_%s_%d`, p.id, tp.Topic, tp.Partition)
processor, err := newProcessor(processorId, tp, p.changelog, p.logger, p.metrics)
if err != nil {
return errors.WithPrevious(err, `cannot start stream processor`)
}
processor.topologyBuilder = p.topologies[tp.Topic].topology
//processor.changelogEnabled = p.topologies[tp.Topic].changelog.enabled
processor.taskPoolConfig = p.topologies[tp.Topic].config.workerPool
p.processors[tp] = processor
return nil
}
func (p *processorPool) Stop() {
p.mu.Lock()
defer p.mu.Unlock()
for _, processor := range p.processors {
processor.Stop()
}
}
func (p *processorPool) Remove(tp consumer.TopicPartition) {
p.mu.Lock()
defer p.mu.Unlock()
p.processors[tp].Stop()
p.logger.Info(
fmt.Sprintf(`processor for %s stopped`, tp.String()))
}
<|start_filename|>kstream/global_table.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package kstream
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/kstream/kstream/processors"
"github.com/tryfix/kstream/kstream/processors/join"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/log"
)
// GlobalTableOffset represents the starting offset for the global table partition.
type GlobalTableOffset int64
// GlobalTableOffsetDefault defines the default starting offset (the beginning of the partition) used when GlobalTable syncing starts.
const GlobalTableOffsetDefault GlobalTableOffset = 0
// GlobalTableOffsetLatest starts syncing from the latest offset of the partition.
// Suitable for topics with a delete retention policy, since such topics can contain historical data.
const GlobalTableOffsetLatest GlobalTableOffset = -1
// globalTableStoreWriter is the default persistence logic for GlobalTables; it can be overridden via GlobalTableWithBackendWriter.
var globalTableStoreWriter = func(r *data.Record, st store.Store) error {
if _, ok := st.(store.IndexedStore); ok {
return globalTableIndexedStoreWriter(r, st)
}
// tombstone handling
if r.Value == nil {
return st.Backend().Delete(r.Key)
}
return st.Backend().Set(r.Key, r.Value, 0)
}
var globalTableIndexedStoreWriter = func(r *data.Record, store store.Store) error {
k, err := store.KeyEncoder().Decode(r.Key)
if err != nil {
return errors.WithPrevious(err, `indexable-store-writer key decode error`)
}
if r.Value == nil {
return store.Delete(context.Background(), k)
}
v, err := store.ValEncoder().Decode(r.Value)
if err != nil {
return errors.WithPrevious(err, `indexable-store-writer value decode error`)
}
return store.Set(context.Background(), k, v, 0)
}
type globalTableOptions struct {
initialOffset GlobalTableOffset
logger log.Logger
backendWriter StoreWriter
recordVersionExtractor RecordVersionExtractor
recordVersionComparator RecordVersionComparator
}
type GlobalTableOption func(options *globalTableOptions)
// GlobalTableWithOffset overrides the default starting offset used when GlobalTable syncing starts.
func GlobalTableWithOffset(offset GlobalTableOffset) GlobalTableOption {
return func(options *globalTableOptions) {
options.initialOffset = offset
}
}
// GlobalTableWithLogger overrides the default logger for the GlobalTable (default is NoopLogger).
func GlobalTableWithLogger(logger log.Logger) GlobalTableOption {
return func(options *globalTableOptions) {
options.logger = logger
}
}
// GlobalTableWithVersionExtractor sets the version extractor used to read the version of GlobalTable records (default is nil).
func GlobalTableWithVersionExtractor(extractor RecordVersionExtractor) GlobalTableOption {
return func(options *globalTableOptions) {
options.recordVersionExtractor = extractor
}
}
// GlobalTableWithVersionComparator sets the version comparator used to compare record versions (default is nil).
func GlobalTableWithVersionComparator(comparator RecordVersionComparator) GlobalTableOption {
return func(options *globalTableOptions) {
options.recordVersionComparator = comparator
}
}
// GlobalTableWithBackendWriter overrides the persisting behavior of the GlobalTable.
// e.g. :
// func(r *data.Record, store store.Store) error {
// // tombstone handling
// if r.Value == nil {
// return store.Backend().Delete(r.Key)
// }
//
// return store.Backend().Set(r.Key, r.Value, 0)
// }
func GlobalTableWithBackendWriter(writer StoreWriter) GlobalTableOption {
return func(options *globalTableOptions) {
options.backendWriter = writer
}
}
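// GlobalTable is a Stream whose records are continuously materialized into a local key-value store.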
type GlobalTable interface {
Stream
}
type globalKTable struct {
*kStream
storeName string
store store.Store
options *globalTableOptions
}
func (t *globalKTable) To(topic string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...SinkOption) {
panic(`global table does not support stream processing`)
}
func (t *globalKTable) Transform(transformer processors.TransFunc) Stream {
panic(`global table does not support stream transforming`)
}
func (t *globalKTable) Filter(filter processors.FilterFunc) Stream {
panic(`global table does not support stream processing`)
}
func (t *globalKTable) Process(processor processors.ProcessFunc) Stream {
panic(`global table does not support stream processing`)
}
func (t *globalKTable) Join(stream Stream, keyMapper join.KeyMapper, valMapper join.ValueMapper) Stream {
panic(`global table to global table joins are not supported yet`)
}
<|start_filename|>kstream/processors/processor_test.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package processors
import (
"context"
"testing"
)
var p ProcessFunc = func(ctx context.Context, key interface{}, value interface{}) error {
return nil
}
func TestProcessFunc_Process(t *testing.T) {
if err := p(context.Background(), nil, nil); err != nil {
t.Fail()
}
}
<|start_filename|>kstream/changelog/replica_manager.go<|end_filename|>
package changelog
import (
"fmt"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/kstream/offsets"
"github.com/tryfix/log"
"sync"
)
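// ReplicaManager maintains local replicas of changelog topic partitions so that
// a restarting processor can recover most of its state from the local cache and
// only consume the remainder of the changelog from the brokers.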
type ReplicaManager struct {
replicas map[string]*replicaSyncer // map[tp]syncer
mu *sync.RWMutex
offsetManager offsets.Manager
conf *ReplicaManagerConf
cacheManager *cacheManager
logger log.Logger
}
type ReplicaManagerConf struct {
Consumer consumer.PartitionConsumerBuilder
Backend backend.Builder
Logger log.Logger
Tps []consumer.TopicPartition
OffsetManager offsets.Manager
}
func NewReplicaManager(conf *ReplicaManagerConf) (*ReplicaManager, error) {
rm := &ReplicaManager{
replicas: make(map[string]*replicaSyncer),
mu: new(sync.RWMutex),
offsetManager: conf.OffsetManager,
conf: conf,
logger: conf.Logger.NewLog(log.Prefixed(`replica-manager`)),
}
cacheManager, err := newCacheManager(conf.Backend)
if err != nil {
return nil, err
}
rm.cacheManager = cacheManager
for _, tp := range conf.Tps {
replica, err := rm.buildReplica(tp)
if err != nil {
return nil, err
}
rm.replicas[tp.String()] = replica
}
return rm, nil
}
func (m *ReplicaManager) StartReplicas(tps []consumer.TopicPartition) error {
m.logger.Info(`starting replica syncers...`)
wg := new(sync.WaitGroup)
for _, tp := range tps {
wg.Add(1)
if err := m.startReplica(tp, wg); err != nil {
return err
}
}
wg.Wait()
return nil
}
func (m *ReplicaManager) GetCache(tp consumer.TopicPartition) (*cache, error) {
m.mu.Lock()
defer m.mu.Unlock()
cache, err := m.cacheManager.getCache(tp)
if err != nil {
return nil, err
}
return cache, nil
}
func (m *ReplicaManager) startReplica(tp consumer.TopicPartition, wg *sync.WaitGroup) error {
m.logger.Info(fmt.Sprintf(`starting replica syncer [%s]...`, tp))
replica := m.replicas[tp.String()]
localCached, err := replica.cache.LastSynced()
if err != nil {
return err
}
valid, err := m.offsetManager.OffsetValid(tp.Topic, tp.Partition, localCached)
if err != nil {
return err
}
startingOffset := localCached
if valid {
m.logger.Info(fmt.Sprintf(`local cache [%d] found for [%s]`, localCached, tp))
} else {
broker, err := m.offsetManager.GetOffsetOldest(tp.Topic, tp.Partition)
if err != nil {
return err
}
startingOffset = broker
m.logger.Info(fmt.Sprintf(`local cache invalid for [%s] flushing...`, tp))
if err := replica.cache.Flush(); err != nil {
return err
}
m.logger.Info(fmt.Sprintf(`local cache flushed for [%s]`, tp))
}
started, errs := replica.Sync(startingOffset)
go func() {
for err := range errs {
m.logger.Fatal(err)
}
}()
<-started
wg.Done()
m.logger.Info(fmt.Sprintf(`started replica syncer [%s]`, tp))
return nil
}
func (m *ReplicaManager) buildReplica(tp consumer.TopicPartition) (*replicaSyncer, error) {
c, err := m.conf.Consumer.Build(consumer.BuilderWithId(fmt.Sprintf(`changelog_%s_replica_consumer`, tp)))
if err != nil {
return nil, err
}
if rep, ok := m.replicas[tp.String()]; ok {
rep.consumer = c
return rep, nil
}
cache, err := m.cacheManager.getCache(tp)
if err != nil {
return nil, err
}
return &replicaSyncer{
cache: cache,
tp: tp,
consumer: c,
logger: m.logger.NewLog(log.Prefixed(fmt.Sprintf(`sync-%s-%d`, tp.Topic, tp.Partition))),
running: true,
}, nil
}
func (m *ReplicaManager) StopReplicas(tps []consumer.TopicPartition) error {
m.mu.Lock()
defer m.mu.Unlock()
m.logger.Info(`stopping replica sync...`)
for _, tp := range tps {
r := m.replicas[tp.String()]
if err := r.Stop(); err != nil {
m.logger.Error(err)
}
}
m.logger.Info(`replica sync stopped`)
return nil
}
<|start_filename|>kstream/processors/join/side_joiner.go<|end_filename|>
package join
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/topology"
)
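// SideJoiner implements one side of a windowed stream-stream join: an incoming
// record is joined against a match buffered in the opposite side's window, or,
// when no match exists yet, buffered in its own window to await one.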
type SideJoiner struct {
Id int32
Side string
LeftWindow *Window
RightWindow *Window
ValueMapper ValueMapper
childs []topology.Node
childBuilders []topology.NodeBuilder
}
func (sj *SideJoiner) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range sj.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &SideJoiner{
Side: sj.Side,
LeftWindow: sj.LeftWindow,
RightWindow: sj.RightWindow,
ValueMapper: sj.ValueMapper,
childs: childs,
Id: sj.Id,
}, nil
}
func (sj *SideJoiner) ChildBuilders() []topology.NodeBuilder {
return sj.childBuilders
}
func (sj *SideJoiner) AddChildBuilder(builder topology.NodeBuilder) {
sj.childBuilders = append(sj.childBuilders, builder)
}
func (sj *SideJoiner) Next() bool {
return true
}
func (sj *SideJoiner) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error) {
var joinedValue interface{}
switch sj.Side {
case `left`:
v, ok := sj.RightWindow.Read(kIn)
if !ok {
sj.LeftWindow.Write(kIn, vIn)
return nil, nil, false, nil
}
joinedValue, err = sj.ValueMapper(vIn, v)
if err != nil {
return nil, nil, false, errors.WithPrevious(err,
`value mapper failed`)
}
case `right`:
v, ok := sj.LeftWindow.Read(kIn)
if !ok {
sj.RightWindow.Write(kIn, vIn)
return nil, nil, false, nil
}
joinedValue, err = sj.ValueMapper(v, vIn)
if err != nil {
return nil, nil, false, errors.WithPrevious(err,
`value mapper failed`)
}
default:
return nil, nil, false, errors.New(`stream joiner side must be either "left" or "right"`)
}
for _, child := range sj.childs {
_, _, _, err := child.Run(ctx, kIn, joinedValue)
if err != nil {
return nil, nil, false, err
}
}
return kIn, joinedValue, true, nil
}
func (sj *SideJoiner) Type() topology.Type {
return topology.Type(sj.Side + `_side_joiner`)
}
func (sj *SideJoiner) Childs() []topology.Node {
return sj.childs
}
func (sj *SideJoiner) AddChild(node topology.Node) {
sj.childs = append(sj.childs, node)
}
func (sj *SideJoiner) ID() int32 {
return sj.Id
}
<|start_filename|>kstream/processors/join/global_table_star_joiner.go<|end_filename|>
package join
/*import (
"context"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/store"
"sync"
)
type GlobalTableStarJoiner struct {
//Topic string
Joins []GlobalTableJoiner
}
func (j *GlobalTableStarJoiner) Join(ctx context.Context, key interface{}, leftVal interface{}) (joinedVal interface{}, err error) {
wg := &sync.WaitGroup{}
for _, join := range j.Joins{
}
return valJoined, nil
}
func (j *GlobalTableJoiner) Process(ctx context.Context, key interface{}, value interface{}) (interface{}, interface{}, error) {
v, err := j.Join(ctx, key, value)
return key, v, err
}
func (j *GlobalTableJoiner) Name() string {
return j.Store
}
func (j *GlobalTableJoiner) Type() string {
return `GlobalTableJoiner`
}*/
<|start_filename|>kstream/store/store_test.go<|end_filename|>
package store
import (
"context"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/kstream/encoding"
"testing"
"time"
)
func makeTestStore(expiry time.Duration) Store {
return &store{
backend: backend.NewMockBackend(`test_backend`, expiry),
name: `test_store`,
keyEncoder: encoding.IntEncoder{},
valEncoder: encoding.StringEncoder{},
}
}
func TestDefaultStore_Get(t *testing.T) {
ctx := context.Background()
st := makeTestStore(0)
testValue := `test_value`
err := st.Set(ctx, 100, testValue, 0)
if err != nil {
t.Error(err)
return
}
v, err := st.Get(ctx, 100)
if err != nil {
t.Error(err)
}
if v != testValue {
t.Fail()
}
}
func TestDefaultStore_Get_Should_Return_Nil_For_Invalid_Key(t *testing.T) {
ctx := context.Background()
st := makeTestStore(0)
testValue := `test_value`
testKey := 100
err := st.Set(ctx, testKey, testValue, 0)
if err != nil {
t.Error(err)
return
}
v, err := st.Get(ctx, 200)
if err != nil {
t.Error(err)
}
if v != nil {
t.Fail()
}
}
func TestDefaultStore_Set(t *testing.T) {
ctx := context.Background()
st := makeTestStore(0)
testValue := `test_value`
testKey := 100
err := st.Set(ctx, testKey, testValue, 0)
if err != nil {
t.Error(err)
return
}
v, err := st.Get(ctx, testKey)
if err != nil {
t.Error(err)
}
if v != testValue {
t.Fail()
}
}
func TestDefaultStore_Delete(t *testing.T) {
ctx := context.Background()
st := makeTestStore(0)
testValue := `test_value`
testKey := 100
err := st.Set(ctx, testKey, testValue, 0)
if err != nil {
t.Error(err)
return
}
if err := st.Delete(ctx, testKey); err != nil {
t.Error(err)
return
}
v, err := st.Get(ctx, testKey)
if err != nil {
t.Error(err)
}
if v != nil {
t.Fail()
}
}
func TestDefaultStore_Set_Record_Expiry(t *testing.T) {
ctx := context.Background()
st := makeTestStore(0)
testValue := `test_value`
testKey := 100
expiry := 100 * time.Millisecond
err := st.Set(ctx, testKey, testValue, expiry)
if err != nil {
t.Error(err)
return
}
time.Sleep(expiry * 2)
v, err := st.Get(ctx, testKey)
if err != nil {
t.Error(err)
}
if v != nil {
t.Fail()
}
}
func TestDefaultStore_Set_Store_Expiry(t *testing.T) {
ctx := context.Background()
expiry := 100 * time.Millisecond
st := makeTestStore(expiry)
testValue := `test_value`
testKey := 100
err := st.Set(ctx, testKey, testValue, 0)
if err != nil {
t.Error(err)
return
}
time.Sleep(expiry * 2)
v, err := st.Get(ctx, testKey)
if err != nil {
t.Error(err)
}
if v != nil {
t.Fail()
}
}
<|start_filename|>kstream/global_table_stream_test.go<|end_filename|>
package kstream
import (
"context"
"fmt"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/backend/memory"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/kstream/kstream/offsets"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/kstream/producer"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"strconv"
"sync"
"testing"
"time"
)
func TestGlobalTableStream_StartStreams(t *testing.T) {
initStream := func(startOffset GlobalTableOffset) (*globalTableStream, func(expectedCount int), func(start int, end int)) {
mocksTopics := admin.NewMockTopics()
kafkaAdmin := &admin.MockKafkaAdmin{
Topics: mocksTopics,
}
offsetManager := &offsets.MockManager{Topics: mocksTopics}
topics := make(map[string]*admin.Topic)
stores := make(map[string]store.Store)
tables := make(map[string]*globalKTable)
opts := new(globalTableOptions)
opts.backendWriter = globalTableStoreWriter
opts.initialOffset = startOffset
for i := 0; i < 1; i++ {
name := fmt.Sprintf(`topic%d`, i)
topics[name] = &admin.Topic{
Name: name,
NumPartitions: 100,
ReplicationFactor: 1,
}
conf := memory.NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
stor, _ := store.NewStore(name, encoding.StringEncoder{}, encoding.StringEncoder{}, store.WithBackend(memory.NewMemoryBackend(conf)))
stores[name] = stor
tables[name] = &globalKTable{store: stor, storeName: stor.Name(), options: opts}
}
if err := kafkaAdmin.CreateTopics(topics); err != nil {
t.Error(err)
}
gTableStream, err := newGlobalTableStream(tables, &GlobalTableStreamConfig{
ConsumerBuilder: consumer.NewMockPartitionConsumerBuilder(mocksTopics, offsetManager),
BackendBuilder: memory.Builder(memory.NewConfig()),
OffsetManager: offsetManager,
KafkaAdmin: kafkaAdmin,
Metrics: metrics.NoopReporter(),
Logger: log.NewNoopLogger(),
})
if err != nil {
t.Error(err)
}
assertFunc := func(expectedCount int) {
count := 0
for _, str := range stores {
i, _ := str.GetAll(nil)
for i.Valid() {
count++
i.Next()
}
}
if count != expectedCount*len(topics) {
t.Errorf(`invalid count have [%d]`, count)
}
}
p := producer.NewMockProducer(mocksTopics)
producerFunc := func(start int, count int) {
for i := start; i <= count; i++ {
for topic := range topics {
_, _, _ = p.Produce(nil, &data.Record{
Key: []byte(fmt.Sprint(i)),
Value: []byte(`v`),
Topic: topic,
})
}
}
}
return gTableStream, assertFunc, producerFunc
}
t.Run(`NoMessage`, func(t *testing.T) {
gTableStream, assertFunc, producerFunc := initStream(GlobalTableOffsetLatest)
producerFunc(0, 0)
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
assertFunc(0)
})
t.Run(`Latest`, func(t *testing.T) {
gTableStream, assertFunc, producerFunc := initStream(GlobalTableOffsetLatest)
producerFunc(0, 3333)
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
assertFunc(0)
})
t.Run(`Oldest`, func(t *testing.T) {
gTableStream, assertFunc, producerFunc := initStream(GlobalTableOffsetDefault)
producerFunc(0, 3332)
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
assertFunc(3333)
})
t.Run(`OldestAfterStarted`, func(t *testing.T) {
gTableStream, assertFunc, producerFunc := initStream(GlobalTableOffsetDefault)
producerFunc(0, 3332)
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
time.Sleep(1 * time.Second)
producerFunc(3333, 6665)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
assertFunc(6666)
})
t.Run(`LatestAfterStarted`, func(t *testing.T) {
gTableStream, assertFunc, producerFunc := initStream(GlobalTableOffsetLatest)
producerFunc(0, 3332)
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
time.Sleep(1 * time.Second)
producerFunc(3334, 6666)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
assertFunc(3333)
})
}
func TestGlobalKTable_Process(t *testing.T) {
initStream := func(opts *globalTableOptions) (*globalTableStream, store.Store, func(key, value []byte)) {
mocksTopics := admin.NewMockTopics()
kafkaAdmin := &admin.MockKafkaAdmin{
Topics: mocksTopics,
}
offsetManager := &offsets.MockManager{Topics: mocksTopics}
topics := make(map[string]*admin.Topic)
stores := make(map[string]store.Store)
tables := make(map[string]*globalKTable)
name := `topic_test_gt_process`
topics[name] = &admin.Topic{
Name: name,
NumPartitions: 100,
ReplicationFactor: 1,
}
conf := memory.NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
stor, _ := store.NewStore(name, encoding.StringEncoder{}, encoding.StringEncoder{}, store.WithBackend(memory.NewMemoryBackend(conf)))
stores[name] = stor
tables[name] = &globalKTable{store: stor, storeName: stor.Name(), options: opts}
if err := kafkaAdmin.CreateTopics(topics); err != nil {
t.Error(err)
}
gTableStream, err := newGlobalTableStream(tables, &GlobalTableStreamConfig{
ConsumerBuilder: consumer.NewMockPartitionConsumerBuilder(mocksTopics, offsetManager),
BackendBuilder: memory.Builder(memory.NewConfig()),
OffsetManager: offsetManager,
KafkaAdmin: kafkaAdmin,
Metrics: metrics.NoopReporter(),
Logger: log.NewNoopLogger(),
})
if err != nil {
t.Error(err)
}
p := producer.NewMockProducer(mocksTopics)
producerFunc := func(key, value []byte) {
for topic := range topics {
_, _, _ = p.Produce(nil, &data.Record{
Key: key,
Value: value,
Topic: topic,
})
}
}
return gTableStream, stor, producerFunc
}
t.Run(`TestVersioningNoPreviousRecords`, func(t *testing.T) {
opts := new(globalTableOptions)
opts.backendWriter = globalTableStoreWriter
opts.recordVersionExtractor = func(ctx context.Context, key, value interface{}) (int64, error) {
val := value.(string)
return strconv.ParseInt(val, 10, 64)
}
opts.recordVersionComparator = func(newVersion, currentVersion int64) bool {
return newVersion > currentVersion
}
opts.initialOffset = GlobalTableOffsetDefault
gTableStream, stor, producerFunc := initStream(opts)
//all records should sink because there are no previous records to compare against. Record count should be 2
producerFunc([]byte(`1`), []byte(`1`))
producerFunc([]byte(`2`), []byte(`2`))
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
time.Sleep(1 * time.Second)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
count := 0
i, _ := stor.GetAll(context.Background())
for i.Valid() {
count++
i.Next()
}
if count != 2 {
t.Errorf(`invalid count have [%d]`, count)
}
})
t.Run(`TestVersioningSinkingCorrectOrder`, func(t *testing.T) {
opts := new(globalTableOptions)
opts.backendWriter = globalTableStoreWriter
opts.recordVersionExtractor = func(ctx context.Context, key, value interface{}) (int64, error) {
val := value.(string)
return strconv.ParseInt(val, 10, 64)
}
opts.recordVersionComparator = func(newVersion, currentVersion int64) bool {
return newVersion > currentVersion
}
opts.initialOffset = GlobalTableOffsetDefault
gTableStream, stor, producerFunc := initStream(opts)
//all records should sink and the last stored value for key 1 should be 7 because the record versions arrive in order
producerFunc([]byte(`1`), []byte(`1`))
producerFunc([]byte(`1`), []byte(`5`))
producerFunc([]byte(`1`), []byte(`7`))
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
time.Sleep(1 * time.Second)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
value, _ := stor.Get(context.Background(), `1`)
if value != `7` {
t.Errorf(`invalid value have [%s] expected [7]`, value)
}
})
t.Run(`TestVersioningSinkingIncorrectOrder`, func(t *testing.T) {
opts := new(globalTableOptions)
opts.backendWriter = globalTableStoreWriter
opts.recordVersionExtractor = func(ctx context.Context, key, value interface{}) (int64, error) {
val := value.(string)
return strconv.ParseInt(val, 10, 64)
}
opts.recordVersionComparator = func(newVersion, currentVersion int64) bool {
return newVersion > currentVersion
}
opts.initialOffset = GlobalTableOffsetDefault
gTableStream, stor, producerFunc := initStream(opts)
//the last record should not sink and the stored value should remain 5 because its version is not greater than 5
producerFunc([]byte(`1`), []byte(`1`))
producerFunc([]byte(`1`), []byte(`5`))
producerFunc([]byte(`1`), []byte(`2`))
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
time.Sleep(1 * time.Second)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
value, _ := stor.Get(context.Background(), `1`)
if value != `5` {
t.Errorf(`invalid value have [%s] expected [5]`, value)
}
})
t.Run(`TestVersioningDeleteAndReSink`, func(t *testing.T) {
opts := new(globalTableOptions)
opts.backendWriter = globalTableStoreWriter
opts.recordVersionExtractor = func(ctx context.Context, key, value interface{}) (int64, error) {
val := value.(string)
return strconv.ParseInt(val, 10, 64)
}
opts.recordVersionComparator = func(newVersion, currentVersion int64) bool {
return newVersion > currentVersion
}
opts.initialOffset = GlobalTableOffsetDefault
gTableStream, stor, producerFunc := initStream(opts)
/* after a delete, the new value's version need not be compared with previous
versions, because no record exists in the store for that key anymore
*/
producerFunc([]byte(`1`), []byte(`1`))
producerFunc([]byte(`1`), []byte(`5`))
producerFunc([]byte(`1`), nil) // to delete store value for key = `1`
producerFunc([]byte(`1`), []byte(`2`))
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
time.Sleep(1 * time.Second)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
value, _ := stor.Get(context.Background(), `1`)
if value != `2` {
t.Errorf(`invalid value have [%s] expected [2]`, value)
}
})
t.Run(`TestVersioningNoVersionExtractor`, func(t *testing.T) {
opts := new(globalTableOptions)
opts.backendWriter = globalTableStoreWriter
opts.initialOffset = GlobalTableOffsetDefault
gTableStream, stor, producerFunc := initStream(opts)
//without a version extractor records sink in arrival order, so the last (lower-versioned) value wins
producerFunc([]byte(`1`), []byte(`1`))
producerFunc([]byte(`1`), []byte(`5`))
producerFunc([]byte(`1`), []byte(`2`))
wg := &sync.WaitGroup{}
gTableStream.StartStreams(wg)
time.Sleep(1 * time.Second)
go func() {
time.Sleep(1 * time.Second)
gTableStream.stop()
}()
wg.Wait()
value, _ := stor.Get(context.Background(), `1`)
if value != `2` {
t.Errorf(`invalid value have [%s] expected [2]`, value)
}
})
}
<|start_filename|>kstream/encoding/json_encoder.go<|end_filename|>
package encoding
import "encoding/json"
type JsonSerializer struct{}
func NewJsonSerDes() *JsonSerializer {
return &JsonSerializer{}
}
func (s *JsonSerializer) Encode(data interface{}) ([]byte, error) {
return json.Marshal(data)
}
func (s *JsonSerializer) Decode(byt []byte, v interface{}) error {
return json.Unmarshal(byt, &v)
}
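// Example (illustrative): round-tripping a value through the serializer.
//
// serde := NewJsonSerDes()
// byt, err := serde.Encode(map[string]string{`a`: `b`})
// if err != nil {
// // handle encode failure
// }
// decoded := map[string]string{}
// err = serde.Decode(byt, &decoded)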
<|start_filename|>kstream/worker_pool/pool.go<|end_filename|>
package worker_pool
import (
"context"
"fmt"
"github.com/tryfix/kstream/kstream/topology"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"hash"
"hash/fnv"
"math/rand"
"time"
)
type ExecutionOrder int
const (
OrderRandom ExecutionOrder = iota
OrderByKey
OrderPreserved
)
func (eo ExecutionOrder) String() string {
order := `OrderRandom`
if eo == OrderByKey {
return `OrderByKey`
}
if eo == OrderPreserved {
return `OrderPreserved`
}
return order
}
type task struct {
ctx context.Context
key []byte
val []byte
doneClb func()
}
type PoolConfig struct {
NumOfWorkers int
WorkerBufferSize int
Order ExecutionOrder
}
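// Example (illustrative): a pool configuration that preserves per-key ordering
// across eight workers; the sizes shown are assumptions, not recommended defaults.
//
// conf := &PoolConfig{
// NumOfWorkers: 8,
// WorkerBufferSize: 100,
// Order: OrderByKey,
// }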
type Pool struct {
id string
topology *topology.TopologyBuilder
size int64
workers []*worker
logger log.Logger
order ExecutionOrder
stopped chan bool
hasher hash.Hash32
}
func NewPool(id string, tb *topology.TopologyBuilder, metricsReporter metrics.Reporter, logger log.Logger, config *PoolConfig) *Pool {
p := &Pool{
id: id,
topology: tb,
size: int64(config.NumOfWorkers),
order: config.Order,
logger: logger.NewLog(log.Prefixed(`pool`)),
workers: make([]*worker, config.NumOfWorkers),
hasher: fnv.New32a(),
stopped: make(chan bool, 1),
}
bufferUsage := metricsReporter.Counter(metrics.MetricConf{
Path: `k_stream_task_pool_worker_buffer`,
Labels: []string{`pool_id`},
})
for i := int64(config.NumOfWorkers) - 1; i >= 0; i-- {
t, err := tb.Build()
if err != nil {
p.logger.Fatal(`k-stream.streamProcessor`, err)
}
w := &worker{
topology: t,
pool: p,
logger: p.logger.NewLog(log.Prefixed(fmt.Sprintf(`worker-%d`, i))),
tasks: make(chan task, config.WorkerBufferSize),
bufferUsage: bufferUsage,
}
p.workers[i] = w
}
for _, w := range p.workers {
go w.start()
}
return p
}
func (p *Pool) Run(ctx context.Context, key, val []byte, doneClb func()) {
w, err := p.worker(key)
if err != nil {
p.logger.ErrorContext(ctx, `k-stream.task_pool`, err)
return
}
w.tasks <- task{
key: key,
ctx: ctx,
val: val,
doneClb: doneClb,
}
/*select {
case w.tasks <- t:
break
default:
p.logger.Debug(`k-stream.task_pool`, `worker buffer full`)
break
}*/
}
func (p *Pool) Stop() {
for _, w := range p.workers {
w.stop()
}
}
func (p *Pool) worker(key []byte) (*worker, error) {
var w int64
if p.order == OrderRandom && p.size > 1 {
// Int63n(p.size) yields [0, size), so every worker is a candidate
w = rand.Int63n(p.size)
return p.workers[w], nil
}
if p.order == OrderByKey {
p.hasher.Reset()
if _, err := p.hasher.Write(key); err != nil {
return nil, err
}
w = int64(p.hasher.Sum32()) % p.size
}
return p.workers[w], nil
}
type worker struct {
topology topology.Topology
tasks chan task
pool *Pool
logger log.Logger
bufferUsage metrics.Counter
}
func (w *worker) start() {
ticker := time.NewTicker(1 * time.Second)
defer ticker.Stop()
done := make(chan struct{})
defer close(done)
// report buffer usage periodically until the worker drains and returns
go func() {
for {
select {
case <-done:
return
case <-ticker.C:
w.bufferUsage.Count((float64(len(w.tasks))/float64(cap(w.tasks)))*100, map[string]string{`pool_id`: w.pool.id})
}
}
}()
for task := range w.tasks {
_, _, err := w.topology.Run(task.ctx, task.key, task.val)
if err != nil {
w.logger.ErrorContext(task.ctx, `k-stream.task_pool`, err)
}
task.doneClb()
}
}
func (w *worker) stop() {
// closing the task channel lets start() drain the remaining tasks and return,
// which in turn stops the buffer-usage reporter goroutine
close(w.tasks)
}
<|start_filename|>producer/mock-producer.go<|end_filename|>
package producer
import (
"context"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/data"
"hash"
"hash/fnv"
)
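// MockStreamProducer is an in-memory Producer implementation for tests: it
// hashes the record key to choose a partition and appends the record to the
// matching partition of the mock topic.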
type MockStreamProducer struct {
hasher hash.Hash32
topics *admin.Topics
}
func NewMockProducer(topics *admin.Topics) *MockStreamProducer {
return &MockStreamProducer{
hasher: fnv.New32a(),
topics: topics,
}
}
func (msp *MockStreamProducer) Produce(ctx context.Context, message *data.Record) (partition int32, offset int64, err error) {
msp.hasher.Reset()
_, err = msp.hasher.Write(message.Key)
if err != nil {
return partition, offset, err
}
topic, err := msp.topics.Topic(message.Topic)
if err != nil {
return partition, offset, err
}
p := int64(msp.hasher.Sum32()) % int64(len(topic.Partitions()))
pt, err := topic.Partition(int(p))
if err != nil {
return
}
message.Partition = int32(p)
if err = pt.Append(message); err != nil {
return
}
return int32(p), message.Offset, nil
}
func (msp *MockStreamProducer) ProduceBatch(ctx context.Context, messages []*data.Record) error {
for _, msg := range messages {
if _, _, err := msp.Produce(ctx, msg); err != nil {
return err
}
}
return nil
}
func (msp *MockStreamProducer) Close() error {
return nil
}
<|start_filename|>kstream/processors/materializer.go<|end_filename|>
package processors
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/kstream/kstream/topology"
)
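// RecordVersionExtractor extracts the current version of a record given its
// key and the value currently held in the store.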
type RecordVersionExtractor func(ctx context.Context, key, value interface{}) (version int64, err error)
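// RecordVersionWriter stamps the given version into the outgoing value and
// returns the updated value.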
type RecordVersionWriter func(ctx context.Context, version int64, vIn interface{}) (vOut interface{}, err error)
type Materializer struct {
Topic string
Id int32
Store string
VersionExtractor RecordVersionExtractor
VersionWriter RecordVersionWriter
store store.Store
Registry store.Registry
childBuilders []topology.NodeBuilder
childs []topology.Node
}
func NewMaterializeBuilder(topic, store string, registry store.Registry, id int32, options ...MaterializeOption) *Materializer {
builder := &Materializer{
Topic: topic,
Id: id,
Store: store,
VersionWriter: func(ctx context.Context, version int64, vIn interface{}) (vOut interface{}, err error) {
return vIn, nil
},
Registry: registry,
//VersionExtractor: func(key, value interface{}) (version int64, err error) {
// return 1, nil
//},
}
builder.applyOptions(options...)
return builder
}
func (m *Materializer) Build() (topology.Node, error) {
s, err := m.Registry.Store(m.Store)
if err != nil || s == nil {
return nil, errors.New(`store [` + m.Store + `] does not exist`)
}
m.store = s
var childs []topology.Node
for _, childBuilder := range m.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
m.childs = childs
return m, nil
}
func (m *Materializer) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error) {
vOut = vIn
if m.VersionExtractor != nil {
storeValue, err := m.store.Get(ctx, kIn)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `materializer store read error`)
}
currentVersion, err := m.VersionExtractor(ctx, kIn, storeValue)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `materializer version extractor error`)
}
vOut, err = m.VersionWriter(ctx, currentVersion+1, vIn)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `materializer version writer error`)
}
}
err = m.store.Set(ctx, kIn, vOut, 0)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `materializer store write error`)
}
for _, child := range m.childs {
_, _, next, err := child.Run(ctx, kIn, vOut)
if err != nil || !next {
return nil, nil, false, err
}
}
return kIn, vOut, true, err
}
func (m *Materializer) ChildBuilders() []topology.NodeBuilder {
return m.childBuilders
}
func (m *Materializer) AddChildBuilder(builder topology.NodeBuilder) {
m.childBuilders = append(m.childBuilders, builder)
}
func (m *Materializer) Type() topology.Type {
return topology.TypeMaterialize
}
func (m *Materializer) Childs() []topology.Node {
return m.childs
}
func (m *Materializer) AddChild(node topology.Node) {
m.childs = append(m.childs, node)
}
type MaterializeOption func(sink *Materializer)
func (m *Materializer) applyOptions(options ...MaterializeOption) {
for _, option := range options {
option(m)
}
}
func WithVersionExtractor(ve RecordVersionExtractor) MaterializeOption {
return func(mat *Materializer) {
mat.VersionExtractor = ve
}
}
func WithVersionWriter(vi RecordVersionWriter) MaterializeOption {
return func(mat *Materializer) {
mat.VersionWriter = vi
}
}
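// Example (illustrative sketch): building a materializer that stamps an
// incrementing version into each value. The topic, store name, registry and
// the closure bodies are placeholders for illustration.
//
// m := NewMaterializeBuilder(`orders`, `orders_store`, registry, 1,
// WithVersionExtractor(func(ctx context.Context, key, value interface{}) (int64, error) {
// // derive the stored record's version; 0 when no version is embedded
// return 0, nil
// }),
// WithVersionWriter(func(ctx context.Context, version int64, vIn interface{}) (interface{}, error) {
// // embed the version into vIn here
// return vIn, nil
// }))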
<|start_filename|>kstream/builder.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package kstream
import (
"fmt"
"github.com/Shopify/sarama"
saramaMetrics "github.com/rcrowley/go-metrics"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/kstream/changelog"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/kstream/kstream/graph"
"github.com/tryfix/kstream/kstream/offsets"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/kstream/producer"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"reflect"
"strings"
)
type StreamBuilder struct {
config *StreamBuilderConfig
streams map[string]*kStream // map[topic]topology
globalTables map[string]*globalKTable // map[topic]topology
storeRegistry store.Registry
graph *graph.Graph
logger log.Logger
metricsReporter metrics.Reporter
defaultBuilders *DefaultBuilders
topicBuilder *topicBuilder
changelogTopics map[string]*admin.Topic
changelogReplicaManager *changelog.ReplicaManager
}
type BuilderOption func(*DefaultBuilders)
func WithOffsetManager(offsetManager offsets.Manager) BuilderOption {
return func(builders *DefaultBuilders) {
builders.OffsetManager = offsetManager
}
}
func WithKafkaAdmin(kafkaAdmin admin.KafkaAdmin) BuilderOption {
return func(builders *DefaultBuilders) {
builders.KafkaAdmin = kafkaAdmin
}
}
func WithConsumerBuilder(builder consumer.Builder) BuilderOption {
return func(builders *DefaultBuilders) {
builders.Consumer = builder
}
}
func WithPartitionConsumerBuilder(builder consumer.PartitionConsumerBuilder) BuilderOption {
return func(builders *DefaultBuilders) {
builders.PartitionConsumer = builder
}
}
func WithStoreBuilder(builder store.Builder) BuilderOption {
return func(builders *DefaultBuilders) {
builders.Store = builder
}
}
func WithStateStoreBuilder(builder store.StateStoreBuilder) BuilderOption {
return func(builders *DefaultBuilders) {
builders.StateStore = builder
}
}
func WithBackendBuilder(builder backend.Builder) BuilderOption {
return func(builders *DefaultBuilders) {
builders.Backend = builder
}
}
func WithChangelogBuilder(builder changelog.Builder) BuilderOption {
return func(builders *DefaultBuilders) {
builders.changelog = builder
}
}
func WithProducerBuilder(builder producer.Builder) BuilderOption {
return func(builders *DefaultBuilders) {
builders.Producer = builder
}
}
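// Example (illustrative sketch): overriding default builders when constructing
// a StreamBuilder; kafkaAdmin is a placeholder for a concrete admin.KafkaAdmin.
//
// b := NewStreamBuilder(config,
// WithKafkaAdmin(kafkaAdmin),
// WithProducerBuilder(producer.NewProducer),
// )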
func init() {
saramaMetrics.UseNilMetrics = true
}
func NewStreamBuilder(config *StreamBuilderConfig, options ...BuilderOption) *StreamBuilder {
config.Logger.Info(`
_ _ _
| | / ) | | _
| | / / \ \ | |_ ____ ____ ____ ____
| |< < \ \| _) / ___) _ ) _ | \
| | \ \ _____) ) |__| | ( (/ ( ( | | | | |
|_| \_|______/ \___)_| \____)_||_|_|_|_|
𝐆𝐨𝐥𝐚𝐧𝐠 𝐊𝐚𝐟𝐤𝐚 𝐒𝐭𝐫𝐞𝐚𝐦𝐬
`)
config.validate()
config.DefaultBuilders.build(options...)
//enabling kafka broker logs
if config.KafkaLogsEnabled {
sarama.Logger = config.Logger.NewLog(log.Prefixed(`broker`))
}
b := &StreamBuilder{
config: config,
streams: make(map[string]*kStream),
globalTables: make(map[string]*globalKTable),
changelogTopics: make(map[string]*admin.Topic),
logger: config.Logger,
metricsReporter: config.MetricsReporter,
defaultBuilders: config.DefaultBuilders,
graph: graph.NewGraph(),
topicBuilder: &topicBuilder{
topics: make(map[string]*admin.Topic),
admin: config.DefaultBuilders.KafkaAdmin,
logger: config.Logger.NewLog(log.Prefixed(`topic-builder`)),
},
}
b.config.Consumer.BootstrapServers = config.BootstrapServers
b.config.Consumer.GroupId = config.ApplicationId
b.config.Consumer.Logger = config.Logger
b.config.Consumer.MetricsReporter = config.MetricsReporter
b.storeRegistry = store.NewRegistry(&store.RegistryConfig{
Host: config.Store.Http.Host,
HttpEnabled: config.Store.Http.Enabled,
StoreBuilder: b.defaultBuilders.Store,
StateStoreBuilder: b.defaultBuilders.StateStore,
IndexedStoreBuilder: b.defaultBuilders.IndexedStore,
Logger: config.Logger,
MetricsReporter: b.metricsReporter,
})
return b
}
func (b *StreamBuilder) StoreRegistry() store.Registry {
return b.storeRegistry
}
func (b *StreamBuilder) Build(streams ...Stream) error {
for _, stream := range streams {
switch s := stream.(type) {
case *kStream:
if err := b.buildKStream(s); err != nil {
return err
}
case *globalKTable:
b.buildGlobalKTable(s)
default:
return errors.Errorf(`unsupported stream type [%v]:`, reflect.TypeOf(s))
}
}
b.renderGTableGraph()
for _, stream := range b.streams {
b.graph.RenderTopology(stream.topology)
}
b.config.Logger.Info(b.graph.Build())
b.config.Logger.Info(fmt.Sprintf("\n%s", b.config.String(b)))
if err := b.createChangelogTopics(); err != nil {
return err
}
b.setUpChangelogs()
return nil
}
func (b *StreamBuilder) Stream(topic string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...Option) Stream {
if keyEncoder == nil {
log.Fatal(`keyEncoder cannot be null`)
}
if valEncoder == nil {
log.Fatal(`valEncoder cannot be null`)
}
opts := []Option{
withBuilder(b),
WithWorkerPoolOptions(b.config.WorkerPool),
WithConfig(StreamConfigs{
`stream.processor.retry`: 2,
`stream.processor.retry.interval`: 100,
`stream.processor.changelog.enabled`: b.config.ChangeLog.Enabled,
`stream.processor.changelog.topic.name`: fmt.Sprintf(`%s-%s-changelog`, b.config.ApplicationId, topic),
`stream.processor.changelog.topic.minInSyncReplicas`: b.config.ChangeLog.MinInSycReplicas,
`stream.processor.changelog.buffer.enabled`: b.config.ChangeLog.Buffer.Enabled,
`stream.processor.changelog.buffer.flushInterval`: b.config.ChangeLog.Buffer.FlushInterval,
`stream.processor.changelog.buffer.size`: b.config.ChangeLog.Buffer.Size,
`stream.processor.changelog.replicated`: b.config.ChangeLog.Replicated,
`stream.processor.changelog.topic.replicationFactor`: b.config.ChangeLog.ReplicationFactor,
`stream.processor.dlq.enabled`: false,
}),
}
return newKStream(func(s string) string { return topic }, keyEncoder, valEncoder, nil, append(opts, options...)...)
}
func (b *StreamBuilder) GlobalTable(topic string, keyEncoder encoding.Builder, valEncoder encoding.Builder, store string, options ...GlobalTableOption) GlobalTable {
//apply options
opts := new(globalTableOptions)
opts.initialOffset = GlobalTableOffsetDefault
opts.backendWriter = globalTableStoreWriter
opts.recordVersionComparator = func(newVersion, currentVersion int64) bool {
return newVersion > currentVersion
}
for _, o := range options {
o(opts)
}
if keyEncoder == nil {
opts.logger.Fatal(`keyEncoder cannot be null`)
}
if valEncoder == nil {
opts.logger.Fatal(`valEncoder cannot be null`)
}
s := b.Stream(topic, keyEncoder, valEncoder)
stream := &globalKTable{
kStream: s.(*kStream),
storeName: store,
options: opts,
}
return stream
}
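// Example (illustrative sketch): registering a GlobalTable on the builder. The
// encoder builders and store name are placeholders; the store must already be
// registered in the builder's store registry.
//
// gt := b.GlobalTable(`accounts`, keyEncoderBuilder, valEncoderBuilder, `account_store`,
// GlobalTableWithOffset(GlobalTableOffsetLatest))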
func (b *StreamBuilder) buildKStream(kStream *kStream) error {
streams, err := kStream.Build()
if err != nil {
return err
}
for _, stream := range streams {
//streamConfig := new(streamConfig)
//streamConfig.topic = stream.topic(b.config.ApplicationId + `_`)
//streamConfig.taskPoolConfig = &task_pool.PoolConfig{
// NumOfWorkers: b.config.WorkerPool.NumOfWorkers,
// WorkerBufferSize: b.config.WorkerPool.WorkerBufferSize,
// Order: b.config.WorkerPool.Order,
//}
//streamConfig.changelog = new(changelogConfig)
//streamConfig.changelog.enabled = stream.config.changelog.enabled
//
//if streamConfig.changelog.enabled {
// suffix := b.config.ChangeLog.Suffix
// replicationFactor := b.config.ChangeLog.ReplicationFactor
// minInSycReplicas := b.config.ChangeLog.MinInSycReplicas
// replicated := b.config.ChangeLog.Replicated
// buffered := b.config.ChangeLog.Buffer.Enabled
// bufferSize := b.config.ChangeLog.Buffer.Size
// bufferFlush := b.config.ChangeLog.Buffer.FlushInterval
//
// if stream.config.changelog.suffix != `` {
// suffix = stream.config.changelog.suffix
// }
//
// if stream.config.changelog.replicationFactor > 0 {
// replicationFactor = stream.config.changelog.replicationFactor
// }
//
// if stream.config.changelog.minInSycReplicas > 0 {
// minInSycReplicas = stream.config.changelog.minInSycReplicas
// }
//
// if stream.config.changelog.replicated {
// replicated = true
// }
//
// if stream.config.changelog.buffer.enabled {
// buffered = true
// }
//
// if stream.config.changelog.buffer.size > 0 {
// bufferSize = stream.config.changelog.buffer.size
// }
//
// if stream.config.changelog.buffer.flushInterval > 0 {
// bufferFlush = stream.config.changelog.buffer.flushInterval
// }
//
// streamConfig.changelog.topic.name = b.config.ApplicationId + `_` + stream.topic(b.config.ApplicationId+`_`) + suffix
// streamConfig.changelog.topic.suffix = suffix
// streamConfig.changelog.topic.replicationFactor = replicationFactor
// streamConfig.changelog.topic.minInSycReplicas = minInSycReplicas
// streamConfig.changelog.replicated = replicated
// streamConfig.changelog.buffer.enabled = buffered
// streamConfig.changelog.buffer.size = bufferSize
// streamConfig.changelog.buffer.flushInterval = bufferFlush
//}
b.streams[stream.topic(b.config.ApplicationId+`_`)] = stream
}
return nil
}
func (b *StreamBuilder) buildGlobalKTable(table *globalKTable) {
stor, err := b.storeRegistry.Store(table.storeName)
if err != nil {
b.logger.Fatal(err)
}
table.store = stor
//tableConfig := new(globalKTable)
//tableConfig.table = table
/*tableConfig.store.changelog.enabled = table.config.changelog.enabled
if _, ok := table.store.(store.RecoverableStore); ok && table.config.changelog.enabled {
suffix := b.config.Store.ChangeLog.Suffix
replicationFactor := b.config.Store.ChangeLog.ReplicationFactor
minInSycReplicas := b.config.Store.ChangeLog.MinInSycReplicas
if table.config.changelog.suffix != `` {
suffix = table.config.changelog.suffix
}
if table.config.changelog.replicationFactor > 0 {
replicationFactor = table.config.changelog.replicationFactor
}
if table.config.changelog.minInSycReplicas > 0 {
minInSycReplicas = table.config.changelog.minInSycReplicas
}
tableConfig.store.changelog.topic.name = b.config.ApplicationId + `_` + table.topic(b.config.ApplicationId+`_`) + suffix
tableConfig.store.changelog.topic.suffix = suffix
tableConfig.store.changelog.topic.replicationFactor = replicationFactor
tableConfig.store.changelog.topic.minInSycReplicas = minInSycReplicas
}*/
b.globalTables[table.topic(b.config.ApplicationId+`_`)] = table
}
func (b *StreamBuilder) renderGTableGraph() {
b.graph.GTableStreams(`kstreams`, `globalTables`, map[string]string{
`style`: `filled`,
`fillcolor`: `grey`,
}, nil)
for topic, tableConfig := range b.globalTables {
topicU := strings.ReplaceAll(topic, `-`, `_`)
topicU = strings.ReplaceAll(topicU, `.`, `_`)
b.graph.Source(`globalTables`, `g_table_`+topicU, map[string]string{
`label`: fmt.Sprintf(`"topic = %s"`, topic),
}, nil)
b.graph.Store(`g_table_`+topicU, tableConfig.store, map[string]string{
`label`: fmt.Sprintf(`"Name: %s\nBackend: %s"`, tableConfig.store.Name(), tableConfig.store.Backend().Name()),
}, nil)
}
}
func (b *StreamBuilder) createChangelogTopics() error {
b.config.Logger.Info(`fetching changelog topics...`)
var topics []string
// stream changelog configs
for _, stream := range b.streams {
if !stream.config.changelog.enabled {
continue
}
topics = append(topics, stream.topic(``))
}
// global table changelog configs
/*for _, tableConfig := range b.globalTables {
if tableConfig.store.changelog.topic.name == `` {
continue
}
topics = append(topics, tableConfig.store.changelog.topic.name)
changelogTopics[tableConfig.topic] = topicConfig{
topicName: tableConfig.store.changelog.topic.name,
minInSycReplicas: tableConfig.store.changelog.topic.minInSycReplicas,
replicationFactor: tableConfig.store.changelog.topic.replicationFactor,
changelogReplicated: tableConfig.store.changelog.replicated,
}
}*/
if len(topics) < 1 {
return nil
}
// fetch topic info
info, err := b.defaultBuilders.KafkaAdmin.FetchInfo(topics)
if err != nil {
return err
}
for tp, config := range info {
if config.Error != nil {
return config.Error
}
b.streams[tp].config.changelog.topic.NumPartitions = config.NumPartitions
b.streams[tp].config.changelog.topic.ConfigEntries[`cleanup.policy`] = `compact`
b.changelogTopics[tp] = b.streams[tp].config.changelog.topic
b.topicBuilder.apply(b.streams[tp].config.changelog.topic)
}
b.topicBuilder.build()
b.config.Logger.Info(`changelog topics created`)
return nil
}
func (b *StreamBuilder) setUpChangelogs() {
// setup replica syncers for changelog topics
// first get changelog replica Enabled topics from stream topic configs
var replicaTps []consumer.TopicPartition
for _, stream := range b.streams {
if !stream.config.changelog.enabled || !stream.config.changelog.replicated {
continue
}
b.logger.Debug(b.changelogTopics)
pts := b.changelogTopics[stream.topic(``)].NumPartitions
for i := int32(0); i < pts; i++ {
replicaTps = append(replicaTps, consumer.TopicPartition{
Topic: stream.config.changelog.topic.Name,
Partition: i,
})
}
}
//setting up changelog replica manager
if len(replicaTps) > 0 {
rep, err := changelog.NewReplicaManager(&changelog.ReplicaManagerConf{
OffsetManager: b.defaultBuilders.OffsetManager,
Backend: b.defaultBuilders.Backend,
Consumer: b.defaultBuilders.PartitionConsumer,
Tps: replicaTps,
Logger: b.logger,
})
if err != nil {
b.config.Logger.Fatal(
`changelog replica manager init failed due to`, err)
}
b.changelogReplicaManager = rep
}
b.defaultBuilders.changelog = func(id string, topic string, partition int32, opts ...changelog.Options) (changelog.Changelog, error) {
markProducer, err := producer.NewProducer(&producer.Config{
Logger: b.config.Logger,
MetricsReporter: b.metricsReporter,
BootstrapServers: b.config.BootstrapServers,
Id: `test`,
})
if err != nil {
b.config.Logger.Fatal(err)
}
if b.streams[topic].config.changelog.buffer.enabled {
opts = append(opts, changelog.Buffered(b.streams[topic].config.changelog.buffer.size))
opts = append(opts, changelog.FlushInterval(b.streams[topic].config.changelog.buffer.flushInterval))
}
conf := &changelog.StateChangelogConfig{
Logger: b.logger,
Metrics: b.metricsReporter,
Topic: topic,
Partition: partition,
ChangelogId: id,
ApplicationId: b.config.ApplicationId,
Producer: markProducer,
Consumer: b.defaultBuilders.PartitionConsumer,
}
if b.streams[topic].config.changelog.replicated {
conf.ReplicaManager = b.changelogReplicaManager
}
return changelog.NewStateChangelog(conf, opts...)
}
}
<|start_filename|>consumer/events.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package consumer
import "fmt"
type Event interface {
String() string
}
type PartitionAllocated struct {
tps []TopicPartition
}
func (p *PartitionAllocated) String() string {
return fmt.Sprintf(`%v`, p.tps)
}
func (p *PartitionAllocated) TopicPartitions() []TopicPartition {
return p.tps
}
type PartitionRemoved struct {
tps []TopicPartition
}
func (p *PartitionRemoved) String() string {
return fmt.Sprintf(`%v`, p.tps)
}
func (p *PartitionRemoved) TopicPartitions() []TopicPartition {
return p.tps
}
type PartitionEnd struct {
tps []TopicPartition
}
func (p *PartitionEnd) String() string {
return fmt.Sprintf(`%v`, p.tps)
}
func (p *PartitionEnd) TopicPartitions() []TopicPartition {
return p.tps
}
type Error struct {
err error
}
func (p *Error) String() string {
return fmt.Sprintf(`consumer error: %v`, p.err)
}
func (p *Error) Error() string {
return fmt.Sprintf(`consumer error: %v`, p.err)
}
<|start_filename|>examples/example_2/main.go<|end_filename|>
package main
import "github.com/tryfix/kstream/examples/example_1/stream"
func main() {
stream.Init()
}
<|start_filename|>kstream/changelog/state_changelog.go<|end_filename|>
package changelog
import (
"context"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/producer"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"time"
)
type stateChangelog struct {
applicationId string
id string
topic string
partition int32
recovering bool
stopRecovery chan bool
recoveryStopped chan bool
buffer *Buffer
options *options
changelogSuffix string
metrics *changeLogMetrics
logger log.Logger
consumer consumer.PartitionConsumerBuilder
replicaManager *ReplicaManager
}
type changeLogMetrics struct {
marksLatency metrics.Observer
recoveryRate metrics.Counter
recoveryLatency metrics.Observer
deleteLatency metrics.Observer
}
type StateChangelogConfig struct {
ChangelogId string
ApplicationId string
Producer producer.Producer
Topic string
Partition int32
Logger log.Logger
ReplicaManager *ReplicaManager
Metrics metrics.Reporter
Consumer consumer.PartitionConsumerBuilder
}
func NewStateChangelog(config *StateChangelogConfig, opts ...Options) (Changelog, error) {
options := new(options)
if err := options.apply(fmt.Sprintf(`%s_%d`, config.ChangelogId, config.Partition), opts...); err != nil {
return nil, err
}
var metricsLabels = []string{`topic`, `partition`}
buffer := NewBuffer(config.Producer, options.bufferSize, options.flushInterval, config.Logger)
buffer.metrics.flushLatency = config.Metrics.Observer(metrics.MetricConf{
Path: `k_stream_changelog_buffer_flush_latency_microseconds`,
})
return &stateChangelog{
topic: config.Topic,
partition: config.Partition,
id: config.ChangelogId,
applicationId: config.ApplicationId,
options: options,
stopRecovery: make(chan bool),
recoveryStopped: make(chan bool, 1),
buffer: buffer,
changelogSuffix: `_changelog`,
replicaManager: config.ReplicaManager,
logger: config.Logger,
consumer: config.Consumer,
metrics: &changeLogMetrics{
marksLatency: config.Metrics.Observer(metrics.MetricConf{
Path: `k_stream_changelog_mark_latency_microseconds`,
Labels: metricsLabels,
}),
deleteLatency: config.Metrics.Observer(metrics.MetricConf{
Path: `k_stream_changelog_delete_latency_microseconds`,
Labels: metricsLabels,
}),
recoveryRate: config.Metrics.Counter(metrics.MetricConf{
Path: `k_stream_changelog_recovery_rate`,
Labels: metricsLabels,
}),
recoveryLatency: config.Metrics.Observer(metrics.MetricConf{
Path: `k_stream_changelog_recovery_latency_microseconds`,
Labels: metricsLabels,
}),
},
}, nil
}
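// ReadAll recovers the full state of this changelog partition: it seeds the
// result from the local replica cache when the cached offset is still valid,
// consumes the rest of the changelog topic from that offset until the
// partition end, and finally de-duplicates the combined record set.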
func (c *stateChangelog) ReadAll(ctx context.Context) ([]*data.Record, error) {
begin := time.Now()
c.recovering = true
simpleConsumer, err := c.consumer.Build(consumer.BuilderWithId(fmt.Sprintf(`changelog_state_consumer_%s_%d`, c.topic, c.partition)))
if err != nil {
return nil, err
}
ticker := time.NewTicker(1 * time.Second)
c.logger.InfoContext(ctx,
fmt.Sprintf(`changelog recovery started for %s - %s[%d]`, c.id, c.topic, c.partition))
// get replica local cache
tp := consumer.TopicPartition{
Topic: c.changelogTopic(),
Partition: c.partition,
}
messages := make([]*data.Record, 0)
// default offset position will be beginning offset of the partition
lastSyncedOffset := int64(consumer.Earliest)
// if cache available for the partition validate and recover
if c.replicaManager != nil {
cache, err := c.replicaManager.GetCache(tp)
if err != nil {
return nil, err
}
cachedOffset, err := cache.LastSynced()
if err != nil {
return nil, err
}
valid, err := c.replicaManager.offsetManager.OffsetValid(tp.Topic, tp.Partition, cachedOffset)
if err != nil {
return nil, err
}
if valid {
lastSyncedOffset = cachedOffset
cachedRecords := cache.ReadAll()
cachedRecordsLen := len(cachedRecords)
messages = make([]*data.Record, 0, cachedRecordsLen)
messages = append(messages, cachedRecords...)
c.logger.Info(fmt.Sprintf(`[%d] messages recovered from local cache for [%s]`, cachedRecordsLen, tp))
}
}
// manually assign partition to the consumer
c.logger.Info(fmt.Sprintf(`recovery consumer started for [%s] from [%d]`, tp, lastSyncedOffset))
events, err := simpleConsumer.Consume(c.changelogTopic(), c.partition, consumer.Offset(lastSyncedOffset))
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`cannot read partition %s - %s[%d]`,
c.id, c.changelogTopic(), c.partition))
}
c.logger.InfoContext(ctx,
fmt.Sprintf(`recovering from changelog %s - %s[%d]`, c.id, c.changelogTopic(), c.partition))
go func() {
for range ticker.C {
c.logger.InfoContext(ctx,
fmt.Sprintf(`%s - %s[%d] [%d] messages done`, c.id, c.changelogTopic(), c.partition, len(messages)))
}
}()
go func() {
for range c.stopRecovery {
if err := simpleConsumer.Close(); err != nil {
c.logger.Error(
fmt.Sprintf(`closing consumer for %s[%d] failed due to %+v`, c.changelogTopic(), c.partition, err))
}
}
}()
go func() {
for err := range simpleConsumer.Errors() {
c.logger.ErrorContext(ctx,
fmt.Sprintf(`recovery failed for %s - %s[%d], err : %+v`, c.id, c.topic, c.partition, err))
}
}()
MainLoop:
for ev := range events {
switch e := ev.(type) {
case *data.Record:
e.Topic = c.topic
messages = append(messages, e)
c.metrics.recoveryRate.Count(1, map[string]string{
`topic`: c.changelogTopic(),
`partition`: fmt.Sprint(c.partition),
})
case *consumer.PartitionEnd:
c.logger.InfoContext(ctx,
fmt.Sprintf(`end of changelog partition for %s - %s[%d]`, c.id, c.topic, c.partition))
if err := simpleConsumer.Close(); err != nil {
c.logger.Error(
fmt.Sprintf(`closing consumer for %s[%d] failed due to %+v`, c.changelogTopic(), c.partition, err))
}
break MainLoop
}
}
ticker.Stop()
close(tickerDone)
if len(messages) > 0 {
deDuplicated := deDuplicate(messages)
c.logger.Info(
fmt.Sprintf(`%d duplicates were removed while recovering `, len(messages)-len(deDuplicated)))
messages = deDuplicated
}
timeTaken := time.Since(begin).Nanoseconds()
c.metrics.recoveryLatency.Observe(float64(timeTaken/1e3), map[string]string{
`topic`: c.changelogTopic(),
`partition`: fmt.Sprint(c.partition),
})
c.logger.InfoContext(ctx,
fmt.Sprintf(`%d messages recovered in %d milliseconds for %s - %s[%d]`,
len(messages), timeTaken/1000000, c.id, c.topic, c.partition))
c.recovering = false
c.recoveryStopped <- true
c.logger.InfoContext(ctx,
fmt.Sprintf(`changelog recovery done for %s[%d] `, c.topic, c.partition))
return messages, nil
}
func (c *stateChangelog) Put(ctx context.Context, record *data.Record) error {
c.buffer.Store(c.prepareRecord(record))
return nil
}
func (c *stateChangelog) PutAll(ctx context.Context, records []*data.Record) error {
panic(`implement me`)
}
// changelog topics are compaction-enabled; a record with a null value acts as a tombstone and deletes the key
func (c *stateChangelog) Delete(ctx context.Context, record *data.Record) error {
c.buffer.Delete(c.prepareRecord(record))
return nil
}
func (c *stateChangelog) DeleteAll(ctx context.Context, records []*data.Record) error {
panic(`implement me`)
}
func (c *stateChangelog) Close() {
if c.recovering {
c.stopRecovery <- true
<-c.recoveryStopped
}
c.buffer.Close()
c.logger.Info(
fmt.Sprintf(`state changelog %s_%d closed`, c.topic, c.partition))
}
func (c *stateChangelog) prepareRecord(r *data.Record) *data.Record {
return &data.Record{
Key: r.Key,
Value: r.Value,
Timestamp: r.Timestamp,
Topic: c.changelogTopic(),
Partition: r.Partition,
}
}
func (c *stateChangelog) changelogTopic() string {
return c.applicationId + `_` + c.topic + c.changelogSuffix
}
func deDuplicate(duplicates []*data.Record) []*data.Record {
deDuplicated := make(map[string]*data.Record)
for _, record := range duplicates {
// a tombstone (empty value) removes the key; it may still be awaiting compaction in the changelog topic
if len(record.Value) < 1 {
delete(deDuplicated, string(record.Key))
continue
}
deDuplicated[string(record.Key)] = record
}
records := make([]*data.Record, 0, len(deDuplicated))
for _, record := range deDuplicated {
records = append(records, record)
}
return records
}
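// deDuplicateExample is an illustrative sketch (not part of the original source)
// showing the compaction semantics above: for each key the latest record wins,
// and a record with an empty value acts as a tombstone.
func deDuplicateExample() []*data.Record {
replay := []*data.Record{
{Key: []byte(`k1`), Value: []byte(`v1`)},
{Key: []byte(`k2`), Value: []byte(`v1`)},
{Key: []byte(`k1`), Value: []byte(`v2`)}, // later record overwrites the first k1
{Key: []byte(`k2`)}, // tombstone, removes k2 from the result
}
return deDuplicate(replay) // yields a single record: k1 -> v2
}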
<|start_filename|>consumer/group_handler.go<|end_filename|>
package consumer
import (
"context"
"fmt"
"github.com/Shopify/sarama"
"github.com/tryfix/kstream/data"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"sync"
"time"
)
type ReBalanceHandler interface {
OnPartitionRevoked(ctx context.Context, revoked []TopicPartition) error
OnPartitionAssigned(ctx context.Context, assigned []TopicPartition) error
}
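// sketchReBalanceHandler is an illustrative sketch (not part of the original source)
// of a minimal ReBalanceHandler; real handlers typically warm up per-partition
// resources on assignment and release them on revocation.
type sketchReBalanceHandler struct{}
func (sketchReBalanceHandler) OnPartitionAssigned(ctx context.Context, assigned []TopicPartition) error {
return nil // e.g. boot state stores for the assigned partitions
}
func (sketchReBalanceHandler) OnPartitionRevoked(ctx context.Context, revoked []TopicPartition) error {
return nil // e.g. stop processors owned by the revoked partitions
}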
type groupHandler struct {
reBalanceHandler ReBalanceHandler
partitionMap map[string]*partition
partitions chan Partition
logger log.Logger
recordUuidExtractFunc RecordUuidExtractFunc
mu *sync.Mutex
metrics struct {
reporter metrics.Reporter
reBalancing metrics.Gauge
commitLatency metrics.Observer
reBalanceLatency metrics.Observer
endToEndLatency metrics.Observer
}
}
func (h *groupHandler) Setup(session sarama.ConsumerGroupSession) error {
tps := h.extractTps(session.Claims())
h.logger.Info(fmt.Sprintf(`setting up partitions [%#v]`, tps))
if err := h.reBalanceHandler.OnPartitionAssigned(session.Context(), tps); err != nil {
return err
}
h.mu.Lock()
defer h.mu.Unlock()
for _, tp := range tps {
p := newPartition(tp)
h.partitionMap[tp.String()] = p
h.partitions <- p
}
return nil
}
func (h *groupHandler) Cleanup(session sarama.ConsumerGroupSession) error {
tps := h.extractTps(session.Claims())
h.logger.Info(fmt.Sprintf(`cleaning up partitions [%#v]`, tps))
h.mu.Lock()
for _, tp := range tps {
h.partitionMap[tp.String()].close()
delete(h.partitionMap, tp.String())
}
h.mu.Unlock()
return h.reBalanceHandler.OnPartitionRevoked(session.Context(), tps)
}
func (h *groupHandler) ConsumeClaim(g sarama.ConsumerGroupSession, c sarama.ConsumerGroupClaim) error {
tp := TopicPartition{
Topic: c.Topic(),
Partition: c.Partition(),
}
h.mu.Lock()
h.partitionMap[tp.String()].groupSession = g
ch := h.partitionMap[tp.String()].records
h.mu.Unlock()
for msg := range c.Messages() {
t := time.Since(msg.Timestamp)
h.metrics.endToEndLatency.Observe(float64(t.Nanoseconds()/1e3), map[string]string{
`topic`: msg.Topic,
`partition`: fmt.Sprint(msg.Partition),
})
record := &data.Record{
Key: msg.Key,
Value: msg.Value,
Offset: msg.Offset,
Topic: msg.Topic,
Partition: msg.Partition,
Timestamp: msg.Timestamp,
Headers: data.RecordHeaders(msg.Headers),
}
uuid := h.recordUuidExtractFunc(record)
record.UUID = uuid
h.logger.Trace("record received after " +
t.String() +
" for " + tp.String() +
" with key: " + string(msg.Key) +
" and value: " + string(msg.Value) +
" with record-id [" + record.UUID.String() + "]")
ch <- record
}
return nil
}
func (h *groupHandler) extractTps(kafkaTps map[string][]int32) []TopicPartition {
tps := make([]TopicPartition, 0)
for topic, partitions := range kafkaTps {
for _, p := range partitions {
tps = append(tps, TopicPartition{
Topic: topic,
Partition: p,
})
}
}
return tps
}
<|start_filename|>examples/example_2/encoders/encoders.go<|end_filename|>
package encoders
import (
"github.com/tryfix/kstream/examples/example_2/events"
"github.com/tryfix/kstream/kstream/encoding"
)
//var KeyEncoder = func() encoding.Encoder { return Int64Encoder{} }
var StringEncoder = func() encoding.Encoder { return encoding.StringEncoder{} }
var CommonABEncoder = func() encoding.Encoder { return CommonEncoder{} }
var AAEncoder = func() encoding.Encoder { return events.AA{} }
var BBEncoder = func() encoding.Encoder { return events.BB{} }
var CCEncoder = func() encoding.Encoder { return events.CC{} }
<|start_filename|>examples/example_1/stream/account_credited.go<|end_filename|>
package stream
import (
"context"
"fmt"
"github.com/google/uuid"
"github.com/tryfix/kstream/examples/example_1/events"
kstream "github.com/tryfix/kstream/kstream"
"github.com/tryfix/kstream/kstream/branch"
"github.com/tryfix/kstream/kstream/encoding"
"time"
)
type AccountCredited struct {
Upstream kstream.Stream
AccountDetailTable kstream.GlobalTable
CustomerProfileTable kstream.GlobalTable
KeyEncoder func() encoding.Encoder
MessageEncoder func() encoding.Encoder
}
func (ac AccountCredited) Init() {
accountCreditedBranches := ac.Upstream.Branch([]branch.Details{
{
Name: `account_credited`,
Predicate: func(ctx context.Context, key interface{}, val interface{}) (b bool, e error) {
_, ok := val.(events.AccountCredited)
return ok, nil
},
}})
accountCreditedBranch := accountCreditedBranches[0]
filteredAccountCredited := accountCreditedBranch.Filter(ac.filterFromTimestamp)
joinedCreditedAccountDetails := filteredAccountCredited.JoinGlobalTable(ac.AccountDetailTable, ac.accountCreditedAccountDetailsKeyMapping, ac.accountCreditedAccountDetailsMapping, 1) //1 for inner join
joinedCreditedCustomerProfile := joinedCreditedAccountDetails.JoinGlobalTable(ac.CustomerProfileTable, ac.accountCreditedMessageCustomerProfileKeyMapping, ac.accountMessageCustomerProfileDetailsMapping, 1)
joinedCreditedCustomerProfile.To(`message`, ac.KeyEncoder, ac.MessageEncoder)
}
func (ac AccountCredited) filterFromTimestamp(ctx context.Context, key, value interface{}) (b bool, e error) {
accCredited, _ := value.(events.AccountCredited)
if time.Now().UnixNano()/1e6-accCredited.Timestamp > 300000 {
return false, nil
}
return true, nil
}
func (ac AccountCredited) accountCreditedAccountDetailsKeyMapping(_, value interface{}) (interface{}, error) {
accCredited, _ := value.(events.AccountCredited)
return accCredited.Body.AccountNo, nil
}
func (ac AccountCredited) accountCreditedAccountDetailsMapping(left interface{}, right interface{}) (joined interface{}, err error) {
l, _ := left.(events.AccountCredited)
r, _ := right.(events.AccountDetailsUpdated)
dateTime := time.Unix(l.Body.CreditedAt, 0).Format(time.RFC1123)
text := fmt.Sprintf(`Your a/c %d is credited with %v USD on %v at %v`, l.Body.AccountNo, l.Body.Amount, dateTime, l.Body.Location)
message := events.MessageCreated{
ID: uuid.New().String(),
Type: "message_created",
Timestamp: time.Now().UnixNano() / 1e6,
}
message.Body.CustomerID = r.Body.CustomerID
message.Body.Text = text
return message, nil
}
func (ac AccountCredited) accountCreditedMessageCustomerProfileKeyMapping(key interface{}, value interface{}) (mappedKey interface{}, err error) {
message, _ := value.(events.MessageCreated)
return message.Body.CustomerID, nil
}
func (ac AccountCredited) accountMessageCustomerProfileDetailsMapping(left interface{}, right interface{}) (joined interface{}, err error) {
l, _ := left.(events.MessageCreated)
r, _ := right.(events.CustomerProfileUpdated)
l.Body.Address = r.Body.ContactDetails.Address
l.Body.Phone = r.Body.ContactDetails.Phone
l.Body.Email = r.Body.ContactDetails.Email
return l, nil
}
<|start_filename|>kstream/changelog/mock_changelog.go<|end_filename|>
package changelog
import (
"context"
"crypto/sha1"
"github.com/tryfix/kstream/data"
"sync"
)
type mockChangelog struct {
data map[string]*data.Record
mu *sync.Mutex
buffer *Buffer
bufferSize int
}
func NewMockChangelog(bufferSize int) Changelog {
return &mockChangelog{
buffer: NewBuffer(),
bufferSize: bufferSize,
mu: new(sync.Mutex),
data: make(map[string]*data.Record),
}
}
func (c *mockChangelog) ReadAll(ctx context.Context) ([]*data.Record, error) {
c.mu.Lock()
defer c.mu.Unlock()
records := make([]*data.Record, 0, len(c.data))
for _, rec := range c.data {
records = append(records, rec)
}
return records, nil
}
func (c *mockChangelog) Put(ctx context.Context, record *data.Record) error {
c.mu.Lock()
defer c.mu.Unlock()
if len(c.buffer.records) >= c.bufferSize {
// flush inline; calling PutAll here would deadlock on the already-held mutex
for _, rec := range c.buffer.records {
c.data[c.hash(rec.Key)] = rec
}
c.buffer.records = c.buffer.records[:0]
}
c.buffer.Store(record)
return nil
}
func (c *mockChangelog) PutAll(ctx context.Context, records []*data.Record) error {
c.mu.Lock()
defer c.mu.Unlock()
for _, rec := range records {
c.data[c.hash(rec.Key)] = rec
}
return nil
}
func (c *mockChangelog) Delete(ctx context.Context, record *data.Record) error {
c.mu.Lock()
defer c.mu.Unlock()
delete(c.data, c.hash(record.Key))
return nil
}
func (c *mockChangelog) DeleteAll(ctx context.Context, records []*data.Record) error {
c.mu.Lock()
defer c.mu.Unlock()
for _, rec := range records {
delete(c.data, c.hash(rec.Key))
}
return nil
}
func (c *mockChangelog) Info() map[string]interface{} {
panic("implement me")
}
func (c *mockChangelog) Close() {
c.buffer = nil
c.mu = nil
c.data = nil
}
func (c *mockChangelog) hash(k []byte) string {
ha := sha1.New()
ha.Write(k)
return string(ha.Sum(nil))
}
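// mockChangelogUsage is an illustrative sketch (not part of the original source):
// the mock buffers Puts until bufferSize is reached and only then flushes them
// into its in-memory map, keyed by a sha1 hash of the record key.
func mockChangelogUsage(ctx context.Context) ([]*data.Record, error) {
cl := NewMockChangelog(2)
_ = cl.Put(ctx, &data.Record{Key: []byte(`k1`), Value: []byte(`v1`)})
_ = cl.Put(ctx, &data.Record{Key: []byte(`k2`), Value: []byte(`v2`)})
// records still sitting in the buffer are not yet visible to ReadAll
return cl.ReadAll(ctx)
}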
<|start_filename|>examples/example_1/stream/init.go<|end_filename|>
package stream
import (
"github.com/google/uuid"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/examples/example_1/encoders"
"github.com/tryfix/kstream/kstream"
"github.com/tryfix/kstream/kstream/worker_pool"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"os"
"os/signal"
)
func init() {
log.StdLogger = log.Constructor.Log(
log.WithLevel(`TRACE`),
log.WithColors(true),
)
}
func Init() {
builderConfig := kstream.NewStreamBuilderConfig()
builderConfig.BootstrapServers = []string{`localhost:9092`}
builderConfig.ApplicationId = `k_stream_example_1`
builderConfig.ConsumerCount = 1
builderConfig.Host = `localhost:8100`
builderConfig.AsyncProcessing = true
//builderConfig.Store.StorageDir = `storage`
builderConfig.Store.Http.Host = `:9002`
builderConfig.ChangeLog.Enabled = false
builderConfig.KafkaLogsEnabled = true
//builderConfig.ChangeLog.Buffer.Enabled = true
//builderConfig.ChangeLog.Buffer.Size = 100
//builderConfig.ChangeLog.ReplicationFactor = 3
//builderConfig.ChangeLog.MinInSycReplicas = 2
builderConfig.WorkerPool.Order = worker_pool.OrderByKey
builderConfig.WorkerPool.NumOfWorkers = 100
builderConfig.WorkerPool.WorkerBufferSize = 10
builderConfig.MetricsReporter = metrics.PrometheusReporter(metrics.ReporterConf{`streams`, `k_stream_test`, nil})
builderConfig.Logger = log.StdLogger
kAdmin := admin.NewKafkaAdmin(builderConfig.BootstrapServers, admin.WithLogger(log.StdLogger))
CreateTopics(kAdmin)
//builderConfig.Producer.Pool.NumOfWorkers = 1
builder := kstream.NewStreamBuilder(builderConfig)
builder.StoreRegistry().New(
`account_detail_store`,
encoders.KeyEncoder,
encoders.AccountDetailsUpdatedEncoder)
builder.StoreRegistry().New(
`customer_profile_store`,
encoders.KeyEncoder,
encoders.CustomerProfileUpdatedEncoder)
err := builder.Build(InitStreams(builder)...)
if err != nil {
log.Fatal(log.WithPrefix(`boot.boot.Init`, `error in stream building`), err)
}
synced := make(chan bool, 1)
// trap SIGINT to trigger a shutdown.
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
stream := kstream.NewStreams(builder,
kstream.NotifyOnStart(synced),
kstream.WithConsumerOptions(consumer.WithRecordUuidExtractFunc(func(message *data.Record) uuid.UUID {
// extract uuid from header
id, err := uuid.Parse(string(message.Key))
if err != nil {
return uuid.New()
}
return id
})),
)
go func() {
select {
case <-signals:
stream.Stop()
}
}()
if err := stream.Start(); err != nil {
log.Fatal(log.WithPrefix(`boot.boot.Init`, `error in stream starting`), err)
}
}
func CreateTopics(kAdmin admin.KafkaAdmin) {
var topics = map[string]*admin.Topic{
`transaction`: {
NumPartitions: 2,
ReplicationFactor: 1,
},
`account_detail`: {
NumPartitions: 2,
ReplicationFactor: 1,
ConfigEntries: map[string]string{
`cleanup.policy`: `compact`,
},
},
`customer_profile`: {
NumPartitions: 2,
ReplicationFactor: 1,
ConfigEntries: map[string]string{
`cleanup.policy`: `compact`,
},
},
}
defer kAdmin.Close()
if err := kAdmin.CreateTopics(topics); err != nil {
log.Fatal(err)
}
}
func InitStreams(builder *kstream.StreamBuilder) []kstream.Stream {
transactionStream := initTransactionStream(builder)
accountDetailTable := initAccountDetailTable(builder)
customerProfileTable := initCustomerProfileTable(builder)
accountCredited := AccountCredited{
Upstream: transactionStream,
AccountDetailTable: accountDetailTable,
CustomerProfileTable: customerProfileTable,
KeyEncoder: encoders.KeyEncoder,
MessageEncoder: encoders.MessageEncoder,
}
accountCredited.Init()
accountDebited := AccountDebited{
Upstream: transactionStream,
AccountDetailTable: accountDetailTable,
CustomerProfileTable: customerProfileTable,
KeyEncoder: encoders.KeyEncoder,
MessageEncoder: encoders.MessageEncoder,
}
accountDebited.Init()
return []kstream.Stream{transactionStream, accountDetailTable, customerProfileTable}
}
<|start_filename|>kstream/processors/filter.go<|end_filename|>
package processors
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/topology"
)
type FilterFunc func(ctx context.Context, key, value interface{}) (bool, error)
type Filter struct {
Id int32
FilterFunc FilterFunc
next bool
childs []topology.Node
childBuilders []topology.NodeBuilder
}
func (f *Filter) ChildBuilders() []topology.NodeBuilder {
return f.childBuilders
}
func (f *Filter) Childs() []topology.Node {
return f.childs
}
func (f *Filter) AddChildBuilder(builder topology.NodeBuilder) {
f.childBuilders = append(f.childBuilders, builder)
}
func (f *Filter) AddChild(node topology.Node) {
f.childs = append(f.childs, node)
}
func (f *Filter) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range f.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &Filter{
FilterFunc: f.FilterFunc,
childs: childs,
next: f.next,
Id: f.Id,
}, nil
}
func (f *Filter) Name() string {
return `filter`
}
func (f *Filter) Next() bool {
return f.next
}
func (f *Filter) Type() topology.Type {
return topology.Type(`filter`)
}
func (f *Filter) ID() int32 {
return f.Id
}
func (f *Filter) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, next bool, err error) {
ok, err := f.FilterFunc(ctx, kIn, vIn)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `process error`)
}
if ok {
for _, child := range f.childs {
_, _, next, err := child.Run(ctx, kIn, vIn)
if err != nil || !next {
return nil, nil, false, err
}
}
}
return kIn, vIn, ok, nil
}
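// filterSketch is an illustrative sketch (not part of the original source):
// Filter nodes are normally wired by the stream builder, but constructing one
// directly shows the contract - Run forwards the record to child nodes only
// when FilterFunc returns true, and reports that verdict as `next`.
func filterSketch() *Filter {
return &Filter{
Id: 1,
FilterFunc: func(ctx context.Context, key, value interface{}) (bool, error) {
payload, ok := value.([]byte)
return ok && len(payload) > 0, nil // drop empty payloads
},
}
}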
<|start_filename|>kstream/builder_config.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package kstream
import (
"bytes"
"fmt"
"github.com/Shopify/sarama"
"github.com/olekukonko/tablewriter"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/kstream/worker_pool"
"github.com/tryfix/kstream/producer"
"github.com/tryfix/kstream/util"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"time"
)
type StreamBuilderConfig struct {
ApplicationId string
AsyncProcessing bool
BootstrapServers []string // kafka Brokers
WorkerPool *worker_pool.PoolConfig
Store struct {
BackendBuilder backend.Builder
ChangeLog struct {
MinInSycReplicas int // min number of insync replications in other nodes
ReplicationFactor int
Suffix string
Buffered bool
BufferedSize int
}
Http struct {
Enabled bool
Host string
}
}
DLQ struct {
Enabled bool
BootstrapServers []string
TopicFormat string
//Type dlq.DqlType // G, T
Topic string // if global
}
Host string
ChangeLog struct {
Enabled bool
Replicated bool
MinInSycReplicas int // min number of insync replications in other nodes
ReplicationFactor int
Suffix string
Buffer struct {
Enabled bool
Size int
FlushInterval time.Duration
}
}
Consumer *consumer.Config
ConsumerCount int
*sarama.Config
Producer *producer.Config
KafkaLogsEnabled bool
MetricsReporter metrics.Reporter
Logger log.Logger
DefaultBuilders *DefaultBuilders
}
var logger = log.NewLog(log.WithColors(true), log.WithFilePath(true), log.WithLevel(log.TRACE), log.Prefixed(`k-stream`)).Log()
func NewStreamBuilderConfig() *StreamBuilderConfig {
config := &StreamBuilderConfig{}
config.Producer = producer.NewConfig()
config.Consumer = consumer.NewConsumerConfig()
config.Config = sarama.NewConfig()
config.Version = sarama.V2_4_0_0
config.Producer.Version = sarama.V2_4_0_0
config.Consumer.Version = sarama.V2_4_0_0
config.ConsumerCount = 1
config.ChangeLog.Suffix = `_changelog`
config.ChangeLog.Replicated = false
config.ChangeLog.MinInSycReplicas = 2
config.ChangeLog.ReplicationFactor = 3
config.ChangeLog.Buffer.Enabled = true
config.ChangeLog.Buffer.Size = 100
config.ChangeLog.Buffer.FlushInterval = 100 * time.Millisecond
config.Producer.Pool.NumOfWorkers = 1
//config.Producer.Producer.Retry.Backoff = time.Millisecond * 30
//config.Producer.Retry = 5
//config.Producer.Idempotent = true
config.Producer.RequiredAcks = producer.WaitForAll
//config.Producer.BatchNumMessages = 1
//config.Producer.QueueBufferingMax = 1
config.KafkaLogsEnabled = false
//set default task execution order
config.WorkerPool = &worker_pool.PoolConfig{
Order: worker_pool.OrderByKey,
NumOfWorkers: 100,
WorkerBufferSize: 10,
}
// default metrics reporter
config.MetricsReporter = metrics.NoopReporter()
config.Logger = logger
config.DefaultBuilders = &DefaultBuilders{configs: config}
return config
}
func (c *StreamBuilderConfig) validate() {
c.Logger = c.Logger.NewLog(log.Prefixed(`k-stream`))
if c.ApplicationId == `` {
c.Logger.Fatal(`[ApplicationId] cannot be empty`)
}
//if c.Host == `` {
// c.logger.Fatal( `[Host] cannot be empty`)
//}
if len(c.BootstrapServers) < 1 {
c.Logger.Fatal(`[BootstrapServers] cannot be empty`)
}
if c.ChangeLog.MinInSycReplicas < 1 {
c.Logger.Fatal(`[ChangeLog.MinInSycReplicas] cannot be zero`)
}
if c.ChangeLog.ReplicationFactor < 1 {
c.Logger.Fatal(`[ChangeLog.ReplicationFactor] cannot be zero`)
}
if c.ChangeLog.Buffer.FlushInterval < 1 {
c.Logger.Fatal(`[ChangeLog.Buffer.FlushInterval] cannot be zero`)
}
if c.ChangeLog.Buffer.Size < 1 {
c.Logger.Fatal(`[ChangeLog.Buffer.Size] cannot be zero`)
}
// producer configurations
//if c.Producer.QueueBufferingMax < 1 {
// c.logger.Fatal( `[Producer.QueueBufferingMax] should be greater than zero`)
//}
//
//if c.Producer.BatchNumMessages < 1 {
// c.logger.Fatal( `[Producer.BatchNumMessages] should be greater than zero`)
//}
//
//if c.Producer.Retry < 1 {
// c.logger.Fatal( `[Producer.Retry] should be greater than zero`)
//}
//
//if c.Producer.RetryBackOff < 1*time.Millisecond {
// c.logger.Fatal( `[Producer.RetryBackOff] should be equal or greater than 1ms`)
//}
//DLQ configurations
//if c.DLQ.Enabled {
// if len(c.DLQ.BootstrapServers) < 1 {
// c.logger.Fatal( `[DLQ.BootstrapServers] cannot be empty`)
// }
//
// if c.DLQ.Type == dlq.DqlGlobal && c.DLQ.TopicFormat == `` {
// c.logger.Fatal(
// `[DLQ.BootstrapServers] global topic format cannot be empty when topic type is [dlq.DqlGlobal]`)
// }
//}
//Worker Pool options
if c.WorkerPool.Order > 2 || c.WorkerPool.Order < 0 {
c.Logger.Fatal(
`Invalid WorkerPool Order`)
}
if c.WorkerPool.WorkerBufferSize < 1 {
c.Logger.Fatal(
`WorkerPool WorkerBufferSize should be greater than 0`)
}
if c.WorkerPool.NumOfWorkers < 1 {
c.Logger.Fatal(
`WorkerPool NumOfWorkers should be greater than 0`)
}
}
func (c *StreamBuilderConfig) String(b *StreamBuilder) string {
data := util.StrToMap(`kStream`, c)
data = append(data, []string{``})
data = append(data, []string{`Stream configs`, ``})
for topic, stream := range b.streams {
b := new(bytes.Buffer)
flowTable := tablewriter.NewWriter(b)
flowData := [][]string{
{`changeLog.Enabled`, fmt.Sprint(stream.config.changelog.enabled)},
}
if stream.config.changelog.enabled {
flowData = append(flowData,
[]string{`changeLog.Buffered`, fmt.Sprint(stream.config.changelog.buffer.enabled)},
[]string{`changeLog.Buffer.Size`, fmt.Sprint(stream.config.changelog.buffer.size)},
[]string{`changeLog.Buffer.Flush`, stream.config.changelog.buffer.flushInterval.String()},
[]string{`changeLog.MinInSycReplicas`, fmt.Sprint(stream.config.changelog.topic.ConfigEntries[`min.insync.replicas`])},
[]string{`changeLog.ReplicationFactor`, fmt.Sprint(stream.config.changelog.topic.ReplicationFactor)},
[]string{`changeLog.Replicated`, fmt.Sprint(stream.config.changelog.replicated)},
[]string{`changeLog.Suffix`, fmt.Sprint(stream.config.changelog.suffix)},
)
}
flowData = append(flowData,
[]string{`worker-pool.order`, fmt.Sprint(stream.config.workerPool.Order)},
[]string{`worker-pool.NumOfWorker`, fmt.Sprint(stream.config.workerPool.NumOfWorkers)},
[]string{`worker-pool.WorkerBufferSize`, fmt.Sprint(stream.config.workerPool.WorkerBufferSize)},
)
for _, v := range flowData {
flowTable.SetColumnAlignment([]int{tablewriter.ALIGN_LEFT})
flowTable.Append(v)
}
flowTable.Render()
data = append(data, []string{topic, b.String()})
}
data = append(data, []string{``})
data = append(data, []string{`Global table configs`, ``})
for topic, t := range b.globalTables {
b := new(bytes.Buffer)
flowTable := tablewriter.NewWriter(b)
tableData := [][]string{
{`store`, fmt.Sprint(t.store.Name())},
}
//if t.store.changelog.enabled {
// tableData = append(tableData,
// []string{`changeLog.Buffered`, fmt.Sprint(t.store.changelog.buffer.enabled)},
// []string{`changeLog.Buffer.Size`, fmt.Sprint(t.store.changelog.buffer.size)},
// []string{`changeLog.Buffer.Flush`, t.store.changelog.buffer.flushInterval.String()},
// []string{`changeLog.MinInSycReplicas`, fmt.Sprint(t.store.changelog.topic.minInSycReplicas)},
// []string{`changeLog.ReplicationFactor`, fmt.Sprint(t.store.changelog.topic.replicationFactor)},
// []string{`changeLog.Replicated`, fmt.Sprint(t.store.changelog.replicated)},
// []string{`changeLog.Suffix`, fmt.Sprint(t.store.changelog.topic.suffix)},
// )
//}
for _, v := range tableData {
flowTable.SetColumnAlignment([]int{tablewriter.ALIGN_LEFT})
flowTable.Append(v)
}
flowTable.Render()
data = append(data, []string{topic, b.String()})
}
out := new(bytes.Buffer)
table := tablewriter.NewWriter(out)
table.SetHeader([]string{"Config", "Value"})
for _, v := range data {
table.SetColumnAlignment([]int{tablewriter.ALIGN_LEFT})
table.Append(v)
}
table.Render()
return out.String()
}
<|start_filename|>kstream/store/meta.go<|end_filename|>
package store
import (
"fmt"
"github.com/Shopify/sarama"
"github.com/tryfix/log"
"time"
)
type Meta struct {
client sarama.Client
mu sync.Mutex
hostMappings map[string]string
group string
}
func NewMata(c sarama.Client, group string) *Meta {
m := &Meta{
client: c,
hostMappings: make(map[string]string),
group: group,
}
go m.runRefresher()
return m
}
func (m *Meta) GetMeta(tp string) string {
// guarded because runRefresher mutates hostMappings concurrently
m.mu.Lock()
defer m.mu.Unlock()
return m.hostMappings[tp]
}
func (m *Meta) Refresh() {
b, err := m.client.Coordinator(m.group)
if err != nil {
log.Fatal(err)
}
res, err := b.DescribeGroups(&sarama.DescribeGroupsRequest{
Groups: []string{m.group},
})
if err != nil {
log.Fatal(err)
}
for _, group := range res.Groups {
if group.GroupId == m.group {
for _, member := range group.Members {
// get host port through following function
//mt , _ := member.GetMemberMetadata()
//mt.UserData
ass, err := member.GetMemberAssignment()
if err != nil {
log.Fatal(err)
}
m.mu.Lock()
for topic, partitions := range ass.Topics {
for _, p := range partitions {
m.hostMappings[fmt.Sprintf(`%s_%d`, topic, p)] = member.ClientHost
}
}
m.mu.Unlock()
}
}
}
log.Info(fmt.Sprintf(`host meta refreshed %+v`, m.hostMappings))
}
func (m *Meta) runRefresher() {
t := time.NewTicker(30 * time.Second)
for range t.C {
m.Refresh()
}
}
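// metaSketch is an illustrative sketch (not part of the original source, sarama
// client setup elided): Meta maps `topic_partition` keys to the host currently
// owning that partition in the group, refreshed every 30 seconds, so a store
// HTTP layer can route key lookups to the right instance.
func metaSketch(client sarama.Client) {
m := NewMata(client, `k_stream_example_1`) // constructor name as it appears in this source
host := m.GetMeta(`transaction_0`) // host serving partition 0 of `transaction`
log.Info(fmt.Sprintf(`partition owner: %s`, host))
}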
<|start_filename|>consumer/partition_consumer.go<|end_filename|>
package consumer
import (
"fmt"
"github.com/Shopify/sarama"
"github.com/google/uuid"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/offsets"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"time"
)
type PartitionConsumer interface {
Consume(topic string, partition int32, offset Offset) (<-chan Event, error)
Errors() <-chan *Error
Close() error
Id() string
}
type partitionConsumer struct {
id string
offsets offsets.Manager
consumerEvents chan Event
consumerErrors chan *Error
consumer sarama.Consumer
partitionConsumer sarama.PartitionConsumer
logger log.Logger
metrics struct {
consumerBuffer metrics.Gauge
consumerBufferMax metrics.Gauge
endToEndLatency metrics.Observer
}
closing chan bool
closed chan bool
metricsDone chan bool
}
func NewPartitionConsumer(c *Config) (PartitionConsumer, error) {
if err := c.Validate(); err != nil {
log.Fatal(err)
return nil, err
}
offsetManager := offsets.NewManager(&offsets.Config{
Config: c.Config,
BootstrapServers: c.BootstrapServers,
Logger: c.Logger,
})
consumer, err := sarama.NewConsumer(c.BootstrapServers, c.Config)
if err != nil {
return nil, errors.WithPrevious(err, `new consumer failed `)
}
pc := &partitionConsumer{
id: c.Id,
offsets: offsetManager,
consumer: consumer,
consumerEvents: make(chan Event, c.ChannelBufferSize),
consumerErrors: make(chan *Error, 1),
closed: make(chan bool, 1),
closing: make(chan bool, 1),
metricsDone: make(chan bool, 1),
logger: c.Logger.NewLog(log.Prefixed(`partition-consumer`)),
}
labels := []string{`topic`, `partition`}
pc.metrics.consumerBuffer = c.MetricsReporter.Gauge(metrics.MetricConf{
Path: `k_stream_partition_consumer_buffer`,
Labels: append(labels, []string{`type`}...),
})
pc.metrics.consumerBufferMax = c.MetricsReporter.Gauge(metrics.MetricConf{
Path: `k_stream_partition_consumer_buffer_max`,
Labels: append(labels, []string{`type`}...),
})
pc.metrics.endToEndLatency = c.MetricsReporter.Observer(metrics.MetricConf{
Path: `k_stream_partition_consumer_end_to_end_latency_microseconds`,
Labels: labels,
})
return pc, nil
}
func (c *partitionConsumer) Consume(topic string, partition int32, offset Offset) (<-chan Event, error) {
partitionStart, err := c.offsets.GetOffsetOldest(topic, partition)
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`cannot get oldest offset for %s[%d]`, topic, partition))
}
partitionEnd, err := c.offsets.GetOffsetLatest(topic, partition)
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`cannot get latest offset for %s[%d]`, topic, partition))
}
// nothing to consume: Latest was requested, the partition is empty, or the offset already points at the partition end
if offset == Offset(sarama.OffsetNewest) || partitionEnd == 0 || partitionStart == partitionEnd || offset == Offset(partitionEnd-1) {
// change the offset to default offset
offset = Offset(sarama.OffsetOldest)
c.consumerEvents <- &PartitionEnd{
tps: []TopicPartition{{
Topic: topic,
Partition: partition,
}},
}
// if offset is valid always request the next offset
if offset > 0 {
offset += 1
}
}
pConsumer, err := c.consumer.ConsumePartition(topic, partition, int64(offset))
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`cannot initiate partition consumer for %s_%d`, topic, partition))
}
c.partitionConsumer = pConsumer
go c.runBufferMetrics(pConsumer)
go c.consumeErrors(pConsumer)
go c.consumeRecords(pConsumer, partitionEnd)
return c.consumerEvents, nil
}
func (c *partitionConsumer) Errors() <-chan *Error {
return c.consumerErrors
}
func (c *partitionConsumer) Id() string {
return c.id
}
func (c *partitionConsumer) consumeErrors(consumer sarama.PartitionConsumer) {
for err := range consumer.Errors() {
c.logger.Error(err)
c.consumerErrors <- &Error{err}
}
close(c.consumerErrors)
}
func (c *partitionConsumer) runBufferMetrics(consumer sarama.PartitionConsumer) {
ticker := time.NewTicker(1 * time.Second)
defer ticker.Stop()
for {
select {
case <-ticker.C:
c.metrics.consumerBuffer.Count(float64(len(consumer.Messages())), map[string]string{
`topic`: ``,
`partition`: `0`,
`type`: `sarama`,
})
c.metrics.consumerBufferMax.Count(float64(cap(consumer.Messages())), map[string]string{
`topic`: ``,
`partition`: `0`,
`type`: `sarama`,
})
c.metrics.consumerBuffer.Count(float64(len(c.consumerEvents)), map[string]string{
`topic`: ``,
`partition`: `0`,
`type`: `k_stream`,
})
c.metrics.consumerBufferMax.Count(float64(cap(c.consumerEvents)), map[string]string{
`topic`: ``,
`partition`: `0`,
`type`: `k_stream`,
})
case <-c.metricsDone:
// exit on Close; ticker.Stop alone would leak this goroutine
return
}
}
}
func (c *partitionConsumer) consumeRecords(consumer sarama.PartitionConsumer, highWatermark int64) {
MainLoop:
for {
select {
case msg, ok := <-consumer.Messages():
if !ok {
break MainLoop
}
latency := time.Since(msg.Timestamp).Nanoseconds() / 1e6
c.metrics.endToEndLatency.Observe(float64(latency*1e3), map[string]string{
`topic`: msg.Topic,
`partition`: fmt.Sprint(msg.Partition),
})
c.logger.Trace(fmt.Sprintf(`message [%d] received after %d milliseconds for %s[%d]`,
msg.Offset, latency, msg.Topic, msg.Partition))
// TODO remove this
c.logger.Debug(`k-stream.Partition-consumer.Trace.Sync`,
fmt.Sprintf(`message received for topic [%s], partition [%d] with key [%s] and value [%s] after %d milliseconds delay at %s`,
msg.Topic,
msg.Partition,
string(msg.Key),
string(msg.Value),
time.Since(msg.Timestamp).Nanoseconds()/1e6,
time.Now(),
))
c.consumerEvents <- &data.Record{
Key: msg.Key,
Value: msg.Value,
Offset: msg.Offset,
Topic: msg.Topic,
Partition: msg.Partition,
Timestamp: msg.Timestamp,
UUID: uuid.New(),
Headers: msg.Headers,
}
//if highWatermark == 0 || highWatermark-1 == msg.Offset {
if msg.Offset == highWatermark-1 {
c.consumerEvents <- &PartitionEnd{
tps: []TopicPartition{{
Topic: msg.Topic,
Partition: msg.Partition,
}},
}
}
case <-c.closing:
break MainLoop
}
}
c.closed <- true
}
func (c *partitionConsumer) Close() error {
c.logger.Info(fmt.Sprintf("[%s] closing... ", c.id))
c.closing <- true
<-c.closed
if err := c.partitionConsumer.Close(); err != nil {
if errs, ok := err.(sarama.ConsumerErrors); ok {
for _, er := range errs {
c.logger.Warn(fmt.Sprintf("partition consumer error while closing [%s] ", er))
}
}
c.logger.Error(fmt.Sprintf("partition consumer close failed [%s] ", err))
}
if err := c.consumer.Close(); err != nil {
c.logger.Error(fmt.Sprintf("consumer close failed [%s] ", err))
}
if err := c.offsets.Close(); err != nil {
c.logger.Error(fmt.Sprintf("cannot close offsets [%s] ", err))
}
close(c.consumerEvents)
c.metricsDone <- true
c.cleanUpMetrics()
c.logger.Info(fmt.Sprintf("[%s] closed", c.id))
return nil
}
func (c *partitionConsumer) cleanUpMetrics() {
c.metrics.consumerBuffer.UnRegister()
c.metrics.consumerBufferMax.UnRegister()
c.metrics.endToEndLatency.UnRegister()
}
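// readToEndSketch is an illustrative sketch (not part of the original source) of
// a typical recovery-style read: drain events from the earliest offset until the
// PartitionEnd marker arrives, then close the consumer.
func readToEndSketch(pc PartitionConsumer, topic string, partition int32) ([]*data.Record, error) {
events, err := pc.Consume(topic, partition, Earliest)
if err != nil {
return nil, err
}
var records []*data.Record
for ev := range events {
switch e := ev.(type) {
case *data.Record:
records = append(records, e)
case *PartitionEnd:
return records, pc.Close()
}
}
return records, nil
}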
<|start_filename|>kstream/default_builders.go<|end_filename|>
package kstream
import (
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/backend/memory"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/kstream/changelog"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/kstream/kstream/offsets"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/kstream/producer"
)
type DefaultBuilders struct {
Producer producer.Builder
changelog changelog.Builder
Consumer consumer.Builder
PartitionConsumer consumer.PartitionConsumerBuilder
Store store.Builder
IndexedStore store.IndexedStoreBuilder
Backend backend.Builder
StateStore store.StateStoreBuilder
OffsetManager offsets.Manager
KafkaAdmin admin.KafkaAdmin
configs *StreamBuilderConfig
}
func (dbs *DefaultBuilders) build(options ...BuilderOption) {
// apply options
for _, option := range options {
option(dbs)
}
// default backend builder will be memory
if dbs.configs.Store.BackendBuilder == nil {
backendBuilderConfig := memory.NewConfig()
backendBuilderConfig.Logger = dbs.configs.Logger
backendBuilderConfig.MetricsReporter = dbs.configs.MetricsReporter
dbs.Backend = memory.Builder(backendBuilderConfig)
dbs.configs.Store.BackendBuilder = dbs.Backend
}
dbs.Backend = dbs.configs.Store.BackendBuilder
dbs.configs.Store.BackendBuilder = dbs.Backend
dbs.Store = func(name string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...store.Options) (store.Store, error) {
return store.NewStore(name, keyEncoder(), valEncoder(), append(
options,
store.WithBackendBuilder(dbs.configs.Store.BackendBuilder),
store.WithLogger(dbs.configs.Logger),
)...)
}
dbs.IndexedStore = func(name string, keyEncoder encoding.Builder, valEncoder encoding.Builder, indexes []store.Index, options ...store.Options) (store.IndexedStore, error) {
return store.NewIndexedStore(name, keyEncoder(), valEncoder(), indexes, append(
options,
store.WithBackendBuilder(dbs.configs.Store.BackendBuilder),
store.WithLogger(dbs.configs.Logger),
)...)
}
if dbs.Producer == nil {
pool, err := producer.NewPool(dbs.configs.Producer.Pool.NumOfWorkers, func(options *producer.Config) (producer.Producer, error) {
options = dbs.configs.Producer
options.BootstrapServers = dbs.configs.BootstrapServers
options.Logger = dbs.configs.Logger
options.MetricsReporter = dbs.configs.MetricsReporter
return producer.NewProducer(options)
})
if err != nil {
dbs.configs.Logger.Fatal(err)
}
dbs.Producer = func(options *producer.Config) (producer.Producer, error) {
return pool, nil
}
}
if dbs.Consumer == nil {
dbs.Consumer = consumer.NewBuilder()
}
dbs.Consumer.Config().GroupId = dbs.configs.ApplicationId
dbs.Consumer.Config().BootstrapServers = dbs.configs.BootstrapServers
dbs.Consumer.Config().MetricsReporter = dbs.configs.MetricsReporter
dbs.Consumer.Config().Logger = dbs.configs.Logger
dbs.Consumer.Config().Consumer = dbs.configs.Consumer.Consumer
if dbs.OffsetManager == nil {
dbs.OffsetManager = offsets.NewManager(&offsets.Config{
Config: dbs.configs.Config,
BootstrapServers: dbs.configs.BootstrapServers,
Logger: dbs.configs.Logger,
})
}
if dbs.KafkaAdmin == nil {
dbs.KafkaAdmin = admin.NewKafkaAdmin(dbs.configs.BootstrapServers,
admin.WithKafkaVersion(dbs.configs.Consumer.Version),
admin.WithLogger(dbs.configs.Logger),
)
}
if dbs.PartitionConsumer == nil {
dbs.PartitionConsumer = consumer.NewPartitionConsumerBuilder()
}
dbs.PartitionConsumer.Config().BootstrapServers = dbs.configs.BootstrapServers
dbs.PartitionConsumer.Config().MetricsReporter = dbs.configs.MetricsReporter
dbs.PartitionConsumer.Config().Logger = dbs.configs.Logger
dbs.PartitionConsumer.Config().Config.Consumer = dbs.configs.Consumer.Consumer
}
<|start_filename|>examples/example_2/encoders/common_encoder.go<|end_filename|>
package encoders
import (
"encoding/json"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/examples/example_2/events"
)
type CommonEncoder struct {
ID string `json:"id"`
Type string `json:"type"`
Body interface{} `json:"body"`
Timestamp int64 `json:"timestamp"`
}
func (t CommonEncoder) Encode(data interface{}) ([]byte, error) {
panic("implement me")
}
func (t CommonEncoder) Decode(data []byte) (interface{}, error) {
te := CommonEncoder{}
err := json.Unmarshal(data, &te)
if err != nil {
return nil, err
}
switch te.Type {
case `aa`:
ac := events.AA{}
err := json.Unmarshal(data, &ac)
if err != nil {
return nil, err
}
return ac, nil
case `bb`:
ad := events.BB{}
err := json.Unmarshal(data, &ad)
if err != nil {
return nil, err
}
return ad, nil
case `cc`:
ad := events.CC{}
err := json.Unmarshal(data, &ad)
if err != nil {
return nil, err
}
return ad, nil
default:
return nil, errors.New(fmt.Sprintf(`unexpected type received :- %v`, te.Type))
}
}
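// decodeSketch is an illustrative sketch (not part of the original source, the
// payload is hypothetical): Decode first unmarshals just the envelope to read
// its `type` field, then re-unmarshals the same bytes into the matching
// concrete event struct.
func decodeSketch() (interface{}, error) {
payload := []byte(`{"id":"1","type":"aa","body":{},"timestamp":0}`)
return CommonEncoder{}.Decode(payload) // returns events.AA on success
}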
<|start_filename|>data/record_test.go<|end_filename|>
package data
import (
"fmt"
"reflect"
"testing"
)
func TestRecord_RecordKey(t *testing.T) {
rec := Record{
Key: []byte(`k`),
Value: []byte(`v`),
}
if !reflect.DeepEqual(rec.RecordKey(), []byte(`k`)) {
t.Fail()
}
}
func TestRecord_RecordValue(t *testing.T) {
rec := Record{
Key: []byte(`k`),
Value: []byte(`v`),
}
if !reflect.DeepEqual(rec.RecordValue(), []byte(`v`)) {
t.Fail()
}
}
func TestRecord_String(t *testing.T) {
r := Record{
Key: []byte(`k`),
Value: []byte(`v`),
Offset: 1000,
Topic: `test`,
Partition: 1,
}
if r.String() != fmt.Sprintf(`%s_%d_%d`, r.Topic, r.Partition, r.Offset) {
t.Fail()
}
}
<|start_filename|>kstream/changelog/changelog.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package changelog
import (
"context"
"github.com/tryfix/kstream/data"
)
type Builder func(id string, topic string, partition int32, opts ...Options) (Changelog, error)
type Changelog interface {
ReadAll(ctx context.Context) ([]*data.Record, error)
Put(ctx context.Context, record *data.Record) error
PutAll(ctx context.Context, records []*data.Record) error
Delete(ctx context.Context, record *data.Record) error
DeleteAll(ctx context.Context, records []*data.Record) error
Close()
}
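// recoverySketch is an illustrative sketch (not part of the original source) of
// how a state store typically uses this interface: replay the whole changelog
// once on boot, then stream subsequent writes through Put/Delete.
func recoverySketch(ctx context.Context, cl Changelog) error {
records, err := cl.ReadAll(ctx) // replays the topic up to its high watermark
if err != nil {
return err
}
for _, rec := range records {
_ = rec // apply each record to the local state store here
}
return nil // from here on, writes flow through Put/Delete as they happen
}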
<|start_filename|>consumer/consumer.go<|end_filename|>
package consumer
import (
"context"
"fmt"
"github.com/Shopify/sarama"
"github.com/google/uuid"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/data"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"sync"
"time"
)
type RecordUuidExtractFunc func(message *data.Record) uuid.UUID
type consumerOptions struct {
recordUuidExtractorFunc RecordUuidExtractFunc
}
func (opts *consumerOptions) applyDefault() {
opts.recordUuidExtractorFunc = func(message *data.Record) uuid.UUID {
return uuid.New()
}
}
func (opts *consumerOptions) apply(options ...Option) {
for _, option := range options {
option(opts)
}
}
type Option func(*consumerOptions)
func WithRecordUuidExtractFunc(fn RecordUuidExtractFunc) Option {
return func(options *consumerOptions) {
options.recordUuidExtractorFunc = fn
}
}
type TopicPartition struct {
Topic string
Partition int32
}
func (tp TopicPartition) String() string {
return fmt.Sprintf(`%s-%d`, tp.Topic, tp.Partition)
}
type Consumer interface {
Consume(tps []string, handler ReBalanceHandler) (chan Partition, error)
Errors() <-chan *Error
Close() error
}
type Offset int64
const (
Earliest Offset = -2
Latest Offset = -1
)
func (o Offset) String() string {
switch o {
case -2:
return `Earliest`
case -1:
return `Latest`
default:
return fmt.Sprint(int(o))
}
}
type consumer struct {
config *Config
context struct {
ctx context.Context
cancel context.CancelFunc
}
saramaGroup sarama.ConsumerGroup
saramaGroupHandler *groupHandler
consumerErrors chan *Error
stopping chan bool
stopped chan bool
}
func NewConsumer(config *Config, options ...Option) (Consumer, error) {
if err := config.validate(); err != nil {
return nil, err
}
// apply options
config.options.apply(options...)
config.Logger = config.Logger.NewLog(log.Prefixed(`consumer`))
c := &consumer{
config: config,
consumerErrors: make(chan *Error, 1),
stopping: make(chan bool, 1),
stopped: make(chan bool, 1),
}
ctx, cancel := context.WithCancel(context.Background())
c.context.ctx = ctx
c.context.cancel = cancel
return c, nil
}
func (c *consumer) Consume(tps []string, handler ReBalanceHandler) (chan Partition, error) {
c.saramaGroupHandler = &groupHandler{
mu: new(sync.Mutex),
recordUuidExtractFunc: c.config.options.recordUuidExtractorFunc,
reBalanceHandler: handler,
partitions: make(chan Partition, 1000),
partitionMap: make(map[string]*partition),
logger: c.config.Logger,
}
group, err := sarama.NewConsumerGroup(c.config.BootstrapServers, c.config.GroupId, c.config.Config)
if err != nil {
return nil, errors.WithPrevious(err, "Failed to create consumer")
}
c.saramaGroup = group
c.setUpMetrics()
// Subscribe for all InputTopics,
c.config.Logger.Info(fmt.Sprintf(`subscribing to topics %v`, tps))
go func() {
for err := range group.Errors() {
c.config.Logger.Error(fmt.Sprintf("Error: %+v", err))
c.consumerErrors <- &Error{err}
}
}()
go c.consume(c.context.ctx, tps, c.saramaGroupHandler)
return c.saramaGroupHandler.partitions, nil
}
func (c *consumer) consume(ctx context.Context, tps []string, h sarama.ConsumerGroupHandler) {
CLoop:
for {
if err := c.saramaGroup.Consume(ctx, tps, h); err != nil && err != sarama.ErrClosedConsumerGroup {
t := 2 * time.Second
c.config.Logger.Error(fmt.Sprintf(`consumer err (%s) while consuming. retrying in %s`, err, t.String()))
time.Sleep(t)
continue CLoop
}
select {
case <-c.context.ctx.Done():
c.config.Logger.Info(fmt.Sprintf(`stopping consumer due to %s`, c.context.ctx.Err()))
break CLoop
default:
continue CLoop
}
}
c.stopped <- true
}
func (c *consumer) Errors() <-chan *Error {
return c.consumerErrors
}
func (c *consumer) Close() error {
c.config.Logger.Info(`upstream consumer is closing...`)
defer c.config.Logger.Info(`upstream consumer closed`)
defer close(c.saramaGroupHandler.partitions)
c.context.cancel()
<-c.stopped
// close sarama consumer so application will leave from the consumer group
if err := c.saramaGroup.Close(); err != nil {
c.config.Logger.Error(`k-stream.consumer`,
fmt.Sprintf(`cannot close consumer due to %+v`, err))
}
c.cleanUpMetrics()
return nil
}
func (c *consumer) setUpMetrics() {
c.saramaGroupHandler.metrics.commitLatency = c.config.MetricsReporter.Observer(metrics.MetricConf{
Path: `k_stream_consumer_commit_latency_microseconds`,
ConstLabels: map[string]string{`group`: c.config.GroupId},
})
c.saramaGroupHandler.metrics.endToEndLatency = c.config.MetricsReporter.Observer(metrics.MetricConf{
Path: `k_stream_consumer_end_to_end_latency_microseconds`,
Labels: []string{`topic`, `partition`},
ConstLabels: map[string]string{`group`: c.config.GroupId},
})
c.saramaGroupHandler.metrics.reBalanceLatency = c.config.MetricsReporter.Observer(metrics.MetricConf{
Path: `k_stream_consumer_re_balance_latency_microseconds`,
ConstLabels: map[string]string{`group`: c.config.GroupId},
})
c.saramaGroupHandler.metrics.reBalancing = c.config.MetricsReporter.Gauge(metrics.MetricConf{
Path: `k_stream_consumer_rebalancing`,
ConstLabels: map[string]string{`group`: c.config.GroupId},
})
}
func (c *consumer) cleanUpMetrics() {
c.saramaGroupHandler.metrics.commitLatency.UnRegister()
c.saramaGroupHandler.metrics.endToEndLatency.UnRegister()
c.saramaGroupHandler.metrics.reBalanceLatency.UnRegister()
c.saramaGroupHandler.metrics.reBalancing.UnRegister()
}
<|start_filename|>kstream/processors/key_selector.go<|end_filename|>
package processors
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/topology"
)
type SelectKeyFunc func(ctx context.Context, key, value interface{}) (kOut interface{}, err error)
type KeySelector struct {
Id int32
SelectKeyFunc SelectKeyFunc
childBuilders []topology.NodeBuilder
childs []topology.Node
}
func (ks *KeySelector) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range ks.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &KeySelector{
SelectKeyFunc: ks.SelectKeyFunc,
childs: childs,
Id: ks.Id,
}, nil
}
func (ks *KeySelector) ChildBuilders() []topology.NodeBuilder {
return ks.childBuilders
}
func (ks *KeySelector) AddChildBuilder(builder topology.NodeBuilder) {
ks.childBuilders = append(ks.childBuilders, builder)
}
func (ks *KeySelector) Next() bool {
return true
}
func (ks *KeySelector) ID() int32 {
return ks.Id
}
func (ks *KeySelector) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error) {
k, err := ks.SelectKeyFunc(ctx, kIn, vIn)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `error in select key function`)
}
for _, child := range ks.childs {
_, _, next, err := child.Run(ctx, k, vIn)
if err != nil || !next {
return nil, nil, false, err
}
}
return k, vIn, true, err
}
func (ks *KeySelector) Type() topology.Type {
return topology.Type(`key_selector`)
}
func (ks *KeySelector) Childs() []topology.Node {
return ks.childs
}
func (ks *KeySelector) AddChild(node topology.Node) {
ks.childs = append(ks.childs, node)
}
<|start_filename|>examples/example_1/stream/mock-stream/mock_stream.go<|end_filename|>
package main
import (
"context"
"fmt"
"github.com/google/uuid"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/examples/example_1/encoders"
"github.com/tryfix/kstream/examples/example_1/events"
"github.com/tryfix/kstream/examples/example_1/stream"
"github.com/tryfix/kstream/kstream"
"github.com/tryfix/kstream/kstream/offsets"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/kstream/kstream/worker_pool"
"github.com/tryfix/kstream/producer"
"github.com/tryfix/log"
"math/rand"
"os"
"os/signal"
"time"
)
var customerIds = []uuid.UUID{}
func init() {
for i := 1; i <= 100; i++ {
customerIds = append(customerIds, uuid.New())
}
}
func setupMockBuilders() *kstream.StreamBuilder {
config := kstream.NewStreamBuilderConfig()
topics := admin.NewMockTopics()
kafkaAdmin := &admin.MockKafkaAdmin{
Topics: topics,
}
if err := kafkaAdmin.CreateTopics(map[string]*admin.Topic{
`transaction`: {
Name: "transaction",
NumPartitions: 2,
ReplicationFactor: 1,
},
`customer_profile`: {
Name: "customer_profile",
NumPartitions: 2,
ReplicationFactor: 1,
},
`account_detail`: {
Name: "account_detail",
NumPartitions: 2,
ReplicationFactor: 1,
},
`message`: {
Name: "message",
NumPartitions: 2,
ReplicationFactor: 1,
},
}); err != nil {
log.Fatal(err)
}
prod := producer.NewMockProducer(topics)
offsetManager := &offsets.MockManager{Topics: topics}
produceAccountDetails(prod)
produceCustomerProfile(prod)
go produceAccountCredited(prod)
go produceAccountDebited(prod)
go consumeMessageAndPrint(topics)
config.BootstrapServers = []string{`localhost:9092`}
config.ApplicationId = `k_stream_example_1`
config.ConsumerCount = 1
config.Host = `localhost:8100`
config.AsyncProcessing = true
//config.Store.StorageDir = `storage`
config.Store.Http.Enabled = true
config.Store.Http.Host = `:9002`
config.ChangeLog.Enabled = false
//config.ChangeLog.Buffer.Enabled = true
//config.ChangeLog.Buffer.Size = 100
//config.ChangeLog.ReplicationFactor = 3
//config.ChangeLog.MinInSycReplicas = 2
config.WorkerPool.Order = worker_pool.OrderByKey
config.WorkerPool.NumOfWorkers = 100
config.WorkerPool.WorkerBufferSize = 10
config.Logger = log.NewLog(
log.WithLevel(`ERROR`),
log.WithColors(true),
).Log()
return kstream.NewStreamBuilder(config,
kstream.WithPartitionConsumerBuilder(consumer.NewMockPartitionConsumerBuilder(topics, offsetManager)),
kstream.WithConsumerBuilder(consumer.NewMockConsumerBuilder(topics)),
kstream.WithOffsetManager(offsetManager),
kstream.WithKafkaAdmin(kafkaAdmin),
kstream.WithProducerBuilder(func(configs *producer.Config) (i producer.Producer, e error) {
return prod, nil
}),
)
}
func main() {
builder := setupMockBuilders()
//mockBackend := backend.NewMockBackend(`mock_backend`, time.Duration(time.Second * 3600))
//accountDetailMockStore := store.NewMockStore(`account_detail_store`, encoders.KeyEncoder(), encoders.AccountDetailsUpdatedEncoder(), mockBackend)
//builder.StoreRegistry().Register(accountDetailMockStore)
//
//customerProfileMockStore := store.NewMockStore(`customer_profile_store`, encoders.KeyEncoder(), encoders.CustomerProfileUpdatedEncoder(), mockBackend)
//builder.StoreRegistry().Register(customerProfileMockStore)
builder.StoreRegistry().NewIndexedStore(
`account_detail_store`,
encoders.KeyEncoder,
encoders.AccountDetailsUpdatedEncoder, []store.Index{store.NewUuidHashIndex(`account_customer_idx`, func(key, val interface{}) (idx uuid.UUID) {
return val.(events.AccountDetailsUpdated).Body.CustomerID
})})
builder.StoreRegistry().NewIndexedStore(
`customer_profile_store`,
encoders.UuidKeyEncoder,
encoders.CustomerProfileUpdatedEncoder, []store.Index{store.NewStringHashIndex(`customer_profile_email_idx`, func(key, val interface{}) (idx string) {
return val.(events.CustomerProfileUpdated).Body.ContactDetails.Email
})})
err := builder.Build(stream.InitStreams(builder)...)
if err != nil {
log.Fatal(`mock build failed`)
}
synced := make(chan bool, 1)
// trap SIGINT to trigger a shutdown.
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
streams := kstream.NewStreams(builder, kstream.NotifyOnStart(synced))
go func() {
select {
case <-signals:
streams.Stop()
}
}()
if err := streams.Start(); err != nil {
log.Fatal(log.WithPrefix(`boot.boot.Init`, `error in stream starting`), err)
}
//produceRealData()
}
func produceAccountCredited(streamProducer producer.Producer) {
for {
key := rand.Int63n(100)
event := events.AccountCredited{
ID: uuid.New().String(),
Type: `account_credited`,
Timestamp: time.Now().UnixNano() / 1e6,
}
event.Body.AccountNo = key
event.Body.TransactionId = rand.Int63n(10000)
event.Body.Amount = 1000.00
event.Body.Reason = `utility bill transfer`
event.Body.DebitedFrom = 1111
event.Body.CreditedAt = time.Now().UnixNano() / 1e6
event.Body.Location = `Main Branch, City A`
encodedKey, err := encoders.KeyEncoder().Encode(key)
if err != nil {
log.Error(err, event)
}
encodedVal, err := encoders.AccountCreditedEncoder().Encode(event)
if err != nil {
log.Error(err, event)
}
_, _, err = streamProducer.Produce(context.Background(), &data.Record{
Key: encodedKey,
Value: encodedVal,
Topic: `transaction`,
Timestamp: time.Now(),
})
if err != nil {
log.Error(err)
}
time.Sleep(time.Millisecond * 500)
}
}
func produceAccountDebited(streamProducer producer.Producer) {
for {
key := rand.Int63n(100)
event := events.AccountDebited{
ID: uuid.New().String(),
Type: `account_debited`,
Timestamp: time.Now().UnixNano() / 1e6,
}
event.Body.AccountNo = key
event.Body.TransactionId = rand.Int63n(10000)
event.Body.Amount = 1000.00
event.Body.Reason = `utility bill transfer`
event.Body.CreditedTo = 2222
event.Body.DebitedAt = time.Now().Unix()
event.Body.Location = `Main Branch, City A`
encodedKey, err := encoders.KeyEncoder().Encode(key)
if err != nil {
log.Error(err, event)
}
encodedVal, err := encoders.AccountDebitedEncoder().Encode(event)
if err != nil {
log.Error(err, event)
}
_, _, err = streamProducer.Produce(context.Background(), &data.Record{
Key: encodedKey,
Value: encodedVal,
Topic: `transaction`,
Timestamp: time.Now(),
})
if err != nil {
log.Error(err)
}
time.Sleep(time.Millisecond * 500)
}
}
func produceAccountDetails(streamProducer producer.Producer) {
for i := 1; i <= 100; i++ {
key := int64(i)
event := events.AccountDetailsUpdated{
ID: uuid.New().String(),
Type: `account_details_updated`,
Timestamp: time.Now().UnixNano() / 1e6,
}
event.Body.AccountNo = key
event.Body.AccountType = `Saving`
event.Body.CustomerID = customerIds[rand.Intn(len(customerIds))]
event.Body.Branch = `Main Branch, City A`
event.Body.BranchCode = 1
event.Body.UpdatedAt = time.Now().Unix()
encodedKey, err := encoders.KeyEncoder().Encode(key)
if err != nil {
log.Error(err, event)
}
encodedVal, err := encoders.AccountDetailsUpdatedEncoder().Encode(event)
if err != nil {
log.Error(err, event)
}
_, _, err = streamProducer.Produce(context.Background(), &data.Record{
Key: encodedKey,
Value: encodedVal,
Topic: `account_detail`,
Timestamp: time.Now(),
})
if err != nil {
log.Error(err)
}
time.Sleep(time.Millisecond * 5)
}
}
func produceCustomerProfile(streamProducer producer.Producer) {
for _, id := range customerIds {
event := events.CustomerProfileUpdated{
ID: id,
Type: `customer_profile_updated`,
Timestamp: time.Now().UnixNano() / 1e6,
}
event.Body.CustomerID = id
event.Body.CustomerName = `<NAME>`
event.Body.NIC = `222222222v`
event.Body.ContactDetails.Email = `<EMAIL>`
event.Body.ContactDetails.Phone = `911`
event.Body.ContactDetails.Address = `No 1, Lane 1, City A.`
event.Body.DateOfBirth = `16th-Nov-2019`
event.Body.UpdatedAt = time.Now().Unix()
encodedKey, err := encoders.UuidKeyEncoder().Encode(id)
if err != nil {
log.Error(err, event)
}
encodedVal, err := encoders.CustomerProfileUpdatedEncoder().Encode(event)
if err != nil {
log.Error(err, event)
}
_, _, err = streamProducer.Produce(context.Background(), &data.Record{
Key: encodedKey,
Value: encodedVal,
Topic: `customer_profile`,
Timestamp: time.Now(),
})
if err != nil {
log.Error(err)
}
time.Sleep(time.Millisecond * 5)
}
}
func consumeMessageAndPrint(topics *admin.Topics) {
mockConsumer := consumer.NewMockConsumer(topics)
partitions, err := mockConsumer.Consume([]string{`message`}, rebalanceHandler{})
if err != nil {
log.Fatal(`consumer error `, err)
}
for p := range partitions {
go func(pt consumer.Partition) {
for record := range pt.Records() {
log.Debug(fmt.Sprintf(`message was received to partition %v with offset %v `, record.Partition, record.Offset))
m, err := encoders.MessageEncoder().Decode(record.Value)
if err != nil {
log.Error(err)
}
message, _ := m.(events.MessageCreated)
fmt.Println(fmt.Sprintf(`received text message := %s`, message.Body.Text))
log.Info(fmt.Sprintf(`received text message := %s`, message.Body.Text))
}
}(p)
}
}
type rebalanceHandler struct {
}
func (r rebalanceHandler) OnPartitionRevoked(ctx context.Context, revoked []consumer.TopicPartition) error {
return nil
}
func (r rebalanceHandler) OnPartitionAssigned(ctx context.Context, assigned []consumer.TopicPartition) error {
return nil
}
func produceRealData() {
config := producer.NewConfig()
config.Logger = log.NewLog(
log.WithLevel(`INFO`),
log.WithColors(true),
).Log()
config.BootstrapServers = []string{`localhost:9092`}
pro, err := producer.NewProducer(config)
if err != nil {
log.Fatal(err)
}
produceAccountDetails(pro)
produceCustomerProfile(pro)
go produceAccountCredited(pro)
produceAccountDebited(pro)
}
<|start_filename|>kstream/topology/node.go<|end_filename|>
package topology
import (
"context"
)
type Type string
const TypeSource Type = `source`
const TypeSink Type = `sink`
const TypeBranch Type = `branch`
const TypeThrough Type = `through`
const TypeJoiner Type = `joiner`
const TypeMaterialize Type = `materializer`
type Node interface {
Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error)
Type() Type
Childs() []Node
AddChild(node Node)
}
type NodeBuilder interface {
Build() (Node, error)
Type() Type
ChildBuilders() []NodeBuilder
AddChildBuilder(builder NodeBuilder)
}
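// runTopologySketch is an illustrative sketch (not part of the original source):
// concrete nodes (see processors.Filter, for example) invoke their child nodes
// inside Run, so driving a whole topology is a single Run call on the source
// node; `cont` tells the caller whether downstream nodes consumed the record.
func runTopologySketch(ctx context.Context, source Node, k, v interface{}) error {
_, _, _, err := source.Run(ctx, k, v)
return err
}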
<|start_filename|>kstream/processors/value_transformer.go<|end_filename|>
package processors
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/topology"
)
type ValueTransformFunc func(ctx context.Context, key, value interface{}) (vOut interface{}, err error)
type ValueTransformer struct {
Id int32
ValueTransformFunc ValueTransformFunc
childBuilders []topology.NodeBuilder
childs []topology.Node
}
func (vt *ValueTransformer) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range vt.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &ValueTransformer{
ValueTransformFunc: vt.ValueTransformFunc,
childs: childs,
Id: vt.Id,
}, nil
}
func (vt *ValueTransformer) ChildBuilders() []topology.NodeBuilder {
return vt.childBuilders
}
func (vt *ValueTransformer) AddChildBuilder(builder topology.NodeBuilder) {
vt.childBuilders = append(vt.childBuilders, builder)
}
func (vt *ValueTransformer) Next() bool {
return true
}
func (vt *ValueTransformer) ID() int32 {
return vt.Id
}
func (vt *ValueTransformer) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error) {
v, err := vt.ValueTransformFunc(ctx, kIn, vIn)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `error in value transform function`)
}
for _, child := range vt.childs {
_, _, next, err := child.Run(ctx, kIn, v)
if err != nil || !next {
return nil, nil, false, err
}
}
return kIn, v, true, err
}
func (vt *ValueTransformer) Type() topology.Type {
return topology.Type(`value_transformer`)
}
func (vt *ValueTransformer) Childs() []topology.Node {
return vt.childs
}
func (vt *ValueTransformer) AddChild(node topology.Node) {
vt.childs = append(vt.childs, node)
}
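// Illustrative sketch (an assumption, not part of the original source): a
// ValueTransformFunc receives the record key and value and returns the new
// value; returning an error stops processing for that record.
var _ topology.NodeBuilder = &ValueTransformer{
Id: 0,
ValueTransformFunc: func(ctx context.Context, key, value interface{}) (interface{}, error) {
// pass the value through unchanged; a real transformer would decode and mutate it
return value, nil
},
}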
<|start_filename|>kstream/rebelance_handler.go<|end_filename|>
package kstream
import (
"context"
"fmt"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/log"
)
type reBalanceHandler struct {
userHandler consumer.ReBalanceHandler
processors *processorPool
logger log.Logger
builder *StreamBuilder
rebalancedCount int
}
func (s *reBalanceHandler) OnPartitionRevoked(ctx context.Context, revoked []consumer.TopicPartition) error {
s.logger.Info(fmt.Sprintf(`partitions %v revoking...`, revoked))
defer s.logger.Info(fmt.Sprintf(`partitions %v revoked`, revoked))
for _, tp := range revoked {
s.processors.Processor(tp).Stop()
}
// start changelog replicas for the revoked partitions before handing over to the user handler
if err := s.startChangelogReplicas(revoked); err != nil {
return err
}
if s.userHandler != nil {
return s.userHandler.OnPartitionRevoked(ctx, revoked)
}
return nil
}
func (s *reBalanceHandler) OnPartitionAssigned(ctx context.Context, assigned []consumer.TopicPartition) error {
s.logger.Info(fmt.Sprintf(`partitions %v assigning...`, assigned))
defer s.logger.Info(fmt.Sprintf(`partitions %v assigned`, assigned))
for _, tp := range assigned {
if err := s.processors.addProcessor(tp); err != nil {
return err
}
if err := s.processors.Processor(tp).boot(); err != nil {
return err
}
}
// stop changelog replicas once, after all assigned partitions are booted
if err := s.stopChangelogReplicas(assigned); err != nil {
return err
}
s.logger.Info(`streams assigned`)
s.rebalancedCount++
if s.userHandler != nil {
return s.userHandler.OnPartitionAssigned(ctx, assigned)
}
return nil
}
func (s *reBalanceHandler) stopChangelogReplicas(allocated []consumer.TopicPartition) error {
if len(allocated) > 0 && s.rebalancedCount > 0 {
for _, tp := range allocated {
// stop started replicas
if s.builder.streams[tp.Topic].config.changelog.replicated {
if err := s.builder.changelogReplicaManager.StopReplicas([]consumer.TopicPartition{
{Topic: s.builder.streams[tp.Topic].config.changelog.topic.Name, Partition: tp.Partition},
}); err != nil {
return err
}
}
}
}
return nil
}
func (s *reBalanceHandler) startChangelogReplicas(allocated []consumer.TopicPartition) error {
if len(allocated) > 0 {
for _, tp := range allocated {
// start replicas for changelog-enabled streams
if s.builder.streams[tp.Topic].config.changelog.replicated {
if err := s.builder.changelogReplicaManager.StartReplicas([]consumer.TopicPartition{
{Topic: s.builder.streams[tp.Topic].config.changelog.topic.Name, Partition: tp.Partition},
}); err != nil {
return err
}
}
}
}
return nil
}
<|start_filename|>examples/example_1/stream/account_details_global_table.go<|end_filename|>
package stream
import (
"github.com/tryfix/kstream/examples/example_1/encoders"
kstream "github.com/tryfix/kstream/kstream"
)
func initAccountDetailTable(builder *kstream.StreamBuilder) kstream.GlobalTable {
return builder.GlobalTable(
`account_detail`,
encoders.KeyEncoder,
encoders.AccountDetailsUpdatedEncoder,
`account_detail_store`)
}
<|start_filename|>kstream/window/sliding.go<|end_filename|>
package window
import "github.com/tryfix/kstream/kstream/context"
type Window interface {
Store(ctx context.Context, key, value interface{}) error
Get(ctx context.Context, key interface{}) (value interface{}, err error)
}
//type slidingWindow
<|start_filename|>kstream/k_stream.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package kstream
import (
"context"
"fmt"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/kstream/branch"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/kstream/kstream/processors"
"github.com/tryfix/kstream/kstream/processors/join"
"github.com/tryfix/kstream/kstream/topology"
"github.com/tryfix/kstream/kstream/worker_pool"
"github.com/tryfix/log"
"sync/atomic"
//"sync/atomic"
"time"
)
var nodeCounter int32
type topic func(string) string
const (
LeftJoin join.Type = iota
InnerJoin
)
type Stream interface {
Branch(branches []branch.Details, opts ...Option) []Stream
SelectKey(selectKeyFunc processors.SelectKeyFunc) Stream
TransformValue(valueTransformFunc processors.ValueTransformFunc) Stream
Transform(transformer processors.TransFunc) Stream
Filter(filter processors.FilterFunc) Stream
Process(processor processors.ProcessFunc) Stream
JoinGlobalTable(table Stream, keyMapper join.KeyMapper, valMapper join.ValueMapper, typ join.Type) Stream
JoinKTable(stream Stream, keyMapper join.KeyMapper, valMapper join.ValueMapper) Stream
JoinStream(stream Stream, valMapper join.ValueMapper, opts ...join.RepartitionOption) Stream
//LeftJoin(stream Stream, keyMapper join.KeyMapper, valMapper join.ValueMapper) Stream
Through(topic string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...SinkOption) Stream
Materialize(topic, storeName string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...processors.MaterializeOption) Stream
To(topic string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...SinkOption)
}
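// Usage sketch (illustrative; `builder`, the encoders and the processor
// function signatures are assumptions, not part of the original source):
//
//	builder.Stream(`transactions`, keyEnc, valEnc).
//		Filter(func(ctx context.Context, key, value interface{}) (bool, error) {
//			return value != nil, nil // drop tombstones
//		}).
//		TransformValue(func(ctx context.Context, key, value interface{}) (interface{}, error) {
//			return value, nil
//		}).
//		To(`transactions_filtered`, keyEnc, valEnc)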
type StreamConfigs map[string]interface{}
type kStreamOptions struct {
processorRetryCount int
processorRetryInterval time.Duration
dlq bool
logger log.Logger
builder *StreamBuilder
workerPool *worker_pool.PoolConfig
changelog struct {
topic *admin.Topic
enabled bool
replicated bool // whether the changelog topic is replicated on other stream instances
minInSyncReplicas int // min number of in-sync replicas for the changelog topic
suffix string
buffer struct {
enabled bool
size int
flushInterval time.Duration
}
}
//repartition RepartitionOptions
}
type kStream struct {
rootStream *kStream
source *kSourceBuilder
topology *topology.TopologyBuilder
root bool
config *kStreamOptions
streams []*kStream
NodeBuilder topology.NodeBuilder
Node topology.Node
keySelected bool
topic topic
}
func (c *kStreamOptions) apply(options ...Option) {
// apply defaults
c.processorRetryCount = 1
c.processorRetryInterval = 0
c.dlq = false
c.changelog.enabled = false
c.changelog.buffer.size = 10
c.changelog.buffer.flushInterval = 1 * time.Second
c.changelog.minInSyncReplicas = 2
c.changelog.topic = new(admin.Topic)
c.changelog.topic.ReplicationFactor = 3
c.changelog.topic.ConfigEntries = map[string]string{}
for _, opt := range options {
opt(c)
}
}
type Option func(*kStreamOptions)
func WithWorkerPoolOptions(poolConfig *worker_pool.PoolConfig) Option {
return func(config *kStreamOptions) {
config.workerPool = poolConfig
}
}
func WithConfig(configs StreamConfigs) Option {
return func(stream *kStreamOptions) {
for p, value := range configs {
switch p {
case `stream.processor.retry`:
if v, ok := value.(int); ok {
stream.processorRetryCount = v
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.retry.interval`:
if v, ok := value.(int); ok {
stream.processorRetryInterval = time.Duration(v) * time.Millisecond
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.changelog.enabled`:
if v, ok := value.(bool); ok {
stream.changelog.enabled = v
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.changelog.topic.name`:
if v, ok := value.(string); ok {
stream.changelog.topic.Name = v
continue
}
log.Fatal(`k-stream.kStream`, fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.changelog.topic.minInSyncReplicas`:
if v, ok := value.(int); ok {
stream.changelog.minInSyncReplicas = v
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.changelog.buffer.flushInterval`:
if v, ok := value.(time.Duration); ok {
stream.changelog.buffer.flushInterval = v
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.changelog.buffer.enabled`:
if v, ok := value.(bool); ok {
stream.changelog.buffer.enabled = v
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.changelog.buffer.size`:
if v, ok := value.(int); ok {
stream.changelog.buffer.size = v
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.changelog.replicated`:
if v, ok := value.(bool); ok {
stream.changelog.replicated = v
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.changelog.topic.replicationFactor`:
if v, ok := value.(int); ok {
stream.changelog.topic.ReplicationFactor = int16(v)
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
case `stream.processor.dlq.enabled`:
if v, ok := value.(bool); ok {
stream.dlq = v
continue
}
log.Fatal(fmt.Sprintf(`unsupported config type for [%s]`, p))
default:
log.Fatal(fmt.Sprintf(`unsupported config [%s]`, p))
}
}
}
}
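// Usage sketch (illustrative only): config values are passed as an untyped
// map, so a mistyped value is a fatal error when the option is applied.
//
//	opt := WithConfig(StreamConfigs{
//		`stream.processor.retry`:                 3,
//		`stream.processor.retry.interval`:        1000, // milliseconds
//		`stream.processor.changelog.enabled`:     true,
//		`stream.processor.changelog.buffer.size`: 100,
//	})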
func WithLogger(logger log.Logger) Option {
return func(config *kStreamOptions) {
config.logger = logger
}
}
func withBuilder(builder *StreamBuilder) Option {
return func(config *kStreamOptions) {
config.builder = builder
}
}
func newKStream(topic topic, keyEncoder encoding.Builder, valEncoder encoding.Builder, parent *kStream, options ...Option) *kStream {
config := new(kStreamOptions)
config.apply(options...)
stream := &kStream{
config: config,
}
if parent == nil {
stream.root = true
stream.rootStream = stream
//setup source Node
sourceNode := new(SourceNode)
stream.Node = sourceNode
stream.NodeBuilder = sourceNode
//setup source builder
kSource := &kSourceBuilder{
topic: topic(``),
name: topic(``),
keyEncoderBuilder: keyEncoder,
valEncoderBuilder: valEncoder,
info: map[string]string{
`topic`: topic(``),
`changelog`: fmt.Sprint(config.changelog.enabled),
},
}
stream.topic = topic
stream.source = kSource
}
return stream
}
func (s *kStream) Branch(branches []branch.Details, opts ...Option) []Stream {
return s.branch(branches, false, opts...)
}
func (s *kStream) branch(branches []branch.Details, parallel bool, opts ...Option) []Stream {
bs := &branch.Splitter{
Id: atomic.AddInt32(&nodeCounter, 1),
}
//id := atomic.AddUint32(&nodeCounter,1)
var streams = make([]Stream, len(branches))
for i, br := range branches {
b := &branch.Branch{
Name: br.Name,
Predicate: br.Predicate,
Id: atomic.AddInt32(&nodeCounter, 1),
}
bs.AddChild(b)
bs.AddChildBuilder(b)
stream := newKStream(nil, nil, nil, s, opts...)
stream.Node = b
stream.NodeBuilder = b
stream.rootStream = s.rootStream
stream.keySelected = s.keySelected
streams[i] = stream
}
s.Node.AddChild(bs)
s.NodeBuilder.AddChildBuilder(bs)
return streams
}
func (s *kStream) SelectKey(selectKeyFunc processors.SelectKeyFunc) Stream {
sk := &processors.KeySelector{
SelectKeyFunc: selectKeyFunc,
Id: atomic.AddInt32(&nodeCounter, 1),
}
s.Node.AddChild(sk)
s.NodeBuilder.AddChildBuilder(sk)
keySelected := newKStream(nil, nil, nil, s)
keySelected.Node = sk
keySelected.NodeBuilder = sk
keySelected.keySelected = true
keySelected.rootStream = s.rootStream
return keySelected
}
func (s *kStream) TransformValue(valueTransformFunc processors.ValueTransformFunc) Stream {
tv := &processors.ValueTransformer{
ValueTransformFunc: valueTransformFunc,
Id: atomic.AddInt32(&nodeCounter, 1),
}
s.Node.AddChild(tv)
s.NodeBuilder.AddChildBuilder(tv)
valueTransformed := newKStream(nil, nil, nil, s)
valueTransformed.Node = tv
valueTransformed.NodeBuilder = tv
valueTransformed.rootStream = s.rootStream
valueTransformed.keySelected = s.keySelected
return valueTransformed
}
func (s *kStream) Transform(transformer processors.TransFunc) Stream {
t := &processors.Transformer{
TransFunc: transformer,
Id: atomic.AddInt32(&nodeCounter, 1),
}
s.Node.AddChild(t)
s.NodeBuilder.AddChildBuilder(t)
transformed := newKStream(nil, nil, nil, s)
transformed.Node = t
transformed.NodeBuilder = t
transformed.keySelected = true
transformed.rootStream = s.rootStream
return transformed
}
func (s *kStream) Filter(filter processors.FilterFunc) Stream {
f := &processors.Filter{
FilterFunc: filter,
Id: atomic.AddInt32(&nodeCounter, 1),
}
s.Node.AddChild(f)
s.NodeBuilder.AddChildBuilder(f)
filtered := newKStream(nil, nil, nil, s)
filtered.Node = f
filtered.NodeBuilder = f
filtered.rootStream = s.rootStream
filtered.keySelected = s.keySelected
return filtered
}
func (s *kStream) JoinGlobalTable(stream Stream, keyMapper join.KeyMapper, valMapper join.ValueMapper, typ join.Type) Stream {
joinStream, ok := stream.(*globalKTable)
if !ok {
log.Fatal(`k-stream.kStream`,
`unsupported join type for global table joiner, only global tables are supported`)
}
joiner := &join.GlobalTableJoiner{
Typ: typ,
Store: joinStream.storeName,
KeyMapper: keyMapper,
ValueMapper: valMapper,
Id: atomic.AddInt32(&nodeCounter, 1),
}
s.Node.AddChild(joiner)
s.NodeBuilder.AddChildBuilder(joiner)
joined := newKStream(nil, nil, nil, s)
joined.Node = joiner
joined.NodeBuilder = joiner
joined.rootStream = s.rootStream
joined.keySelected = s.keySelected
return joined
}
func (s *kStream) JoinStream(stream Stream, valMapper join.ValueMapper, opts ...join.RepartitionOption) Stream {
rightStream, ok := stream.(*kStream)
if !ok {
log.Fatal(`k-stream.kStream`,
`unsupported join type for stream joiner, only k-streams are supported`)
}
var repartition = &join.RepartitionOptions{
LeftTopic: s.rootStream.topic,
RightTopic: rightStream.rootStream.topic,
}
repartition.Apply(opts...)
leftWindow := join.NewWindow()
rightWindow := join.NewWindow()
joinedNode := &join.StreamJoiner{
Id: atomic.AddInt32(&nodeCounter, 1),
}
left := &join.SideJoiner{
Side: `left`,
LeftWindow: leftWindow,
RightWindow: rightWindow,
ValueMapper: valMapper,
Id: atomic.AddInt32(&nodeCounter, 1),
}
left.AddChild(joinedNode)
left.AddChildBuilder(joinedNode)
right := &join.SideJoiner{
Side: `right`,
LeftWindow: leftWindow,
RightWindow: rightWindow,
ValueMapper: valMapper,
Id: atomic.AddInt32(&nodeCounter, 1),
}
right.AddChild(joinedNode)
right.AddChildBuilder(joinedNode)
var setNewRightStream = func() *kStream {
err := repartition.RightRepartition.Validate(join.RightSide)
if err != nil {
log.Fatal(`k-stream.kStream`, err)
}
rightStream.To(repartition.RightRepartition.Topic.Name, repartition.RightRepartition.KeyEncoder,
repartition.RightRepartition.ValueEncoder, withPrefixTopic(func(prefix string) string { return prefix + `_right_stream_joiner` }))
newRightStream := newKStream(func(prefix string) string { return prefix + `_right_stream_joiner` },
repartition.RightRepartition.KeyEncoder, repartition.RightRepartition.ValueEncoder, nil)
return newRightStream
}
var setNewLeftStream = func() *kStream {
err := repartition.LeftRepartition.Validate(join.LeftSide)
if err != nil {
log.Fatal(`k-stream.kStream`, err)
}
s.To(repartition.LeftRepartition.Topic.Name, repartition.LeftRepartition.KeyEncoder,
repartition.LeftRepartition.ValueEncoder, withPrefixTopic(func(prefix string) string { return prefix + `_left_stream_joiner` }))
newLeftStream := newKStream(func(prefix string) string { return prefix + `_left_stream_joiner` },
repartition.LeftRepartition.KeyEncoder, repartition.LeftRepartition.ValueEncoder, nil)
return newLeftStream
}
var setJoinedStream = func(stm *kStream) *kStream {
joined := newKStream(nil, nil, nil, stm)
joined.Node = joinedNode
joined.NodeBuilder = joinedNode
joined.rootStream = stm.rootStream
joined.keySelected = stm.keySelected
return joined
}
if !s.keySelected && !rightStream.keySelected {
s.Node.AddChild(left)
s.NodeBuilder.AddChildBuilder(left)
rightStream.Node.AddChild(right)
rightStream.NodeBuilder.AddChildBuilder(right)
return setJoinedStream(s)
}
if rightStream.keySelected && s.keySelected {
newRightStream := setNewRightStream()
newRightStream.Node = right
newRightStream.NodeBuilder = right
s.rootStream.streams = append(s.rootStream.streams, newRightStream)
newLeftStream := setNewLeftStream()
newLeftStream.Node = left
newLeftStream.NodeBuilder = left
s.rootStream.streams = append(s.rootStream.streams, newLeftStream)
return setJoinedStream(newLeftStream)
}
if rightStream.keySelected && !s.keySelected {
newRightStream := setNewRightStream()
newRightStream.Node = right
newRightStream.NodeBuilder = right
s.rootStream.streams = append(s.rootStream.streams, newRightStream)
s.Node.AddChild(left)
s.NodeBuilder.AddChildBuilder(left)
return setJoinedStream(s)
}
if s.keySelected && !rightStream.keySelected {
newLeftStream := setNewLeftStream()
newLeftStream.Node = left
newLeftStream.NodeBuilder = left
s.rootStream.streams = append(s.rootStream.streams, newLeftStream)
rightStream.Node.AddChild(right)
rightStream.NodeBuilder.AddChildBuilder(right)
return setJoinedStream(newLeftStream)
}
log.Fatal(`k-stream.kStream.stream_joiner`, `cannot reach here`)
return nil
}
func (s *kStream) JoinKTable(stream Stream, keyMapper join.KeyMapper, valMapper join.ValueMapper) Stream {
panic("implement me")
}
func (s *kStream) Process(processor processors.ProcessFunc) Stream {
p := &processors.Processor{
ProcessFunc: processor,
Id: atomic.AddInt32(&nodeCounter, 1),
}
s.Node.AddChild(p)
s.NodeBuilder.AddChildBuilder(p)
processed := newKStream(nil, nil, nil, s)
processed.Node = p
processed.NodeBuilder = p
processed.rootStream = s.rootStream
processed.keySelected = s.keySelected
return processed
}
func (s *kStream) Through(topic string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...SinkOption) Stream {
if keyEncoder == nil {
log.Fatal(`k-stream.kStream`, fmt.Sprintf(`keyEncoder cannot be nil for sink [%s]`, topic))
}
if valEncoder == nil {
log.Fatal(`k-stream.kStream`, fmt.Sprintf(`valEncoder cannot be nil for sink [%s]`, topic))
}
s.To(topic, keyEncoder, valEncoder, options...)
stream := newKStream(func(prefix string) string { return topic }, keyEncoder, valEncoder, nil)
s.streams = append(s.streams, stream)
return stream
}
func (s *kStream) Materialize(topic, storeName string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...processors.MaterializeOption) Stream {
m := processors.NewMaterializeBuilder(topic, storeName, s.rootStream.config.builder.storeRegistry, atomic.AddInt32(&nodeCounter, 1), options...)
s.Node.AddChild(m)
s.NodeBuilder.AddChildBuilder(m)
materialized := newKStream(nil, nil, nil, s)
materialized.Node = m
materialized.NodeBuilder = m
materialized.rootStream = s.rootStream
materialized.keySelected = s.keySelected
materialized.To(topic, keyEncoder, valEncoder)
return materialized
}
func (s *kStream) To(topic string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...SinkOption) {
if keyEncoder == nil {
log.Fatal(`k-stream.kStream`, fmt.Sprintf(`keyEncoder cannot be nil for sink [%s]`, topic))
}
if valEncoder == nil {
log.Fatal(`k-stream.kStream`, fmt.Sprintf(`valEncoder cannot be nil for sink [%s]`, topic))
}
sink := NewKSinkBuilder(
`sink_`+topic,
atomic.AddInt32(&nodeCounter, 1),
func(prefix string) string { return topic },
keyEncoder,
valEncoder,
options...)
sink.info = map[string]string{
`topic`: topic,
}
//sink := &KSink{
// name: `sink_` + topic,
// topic: func(prefix string)(string){return topic},
// KeyEncoderBuilder: keyEncoder,
// ValEncoderBuilder: valEncoder,
// info: map[string]string{
// `topic`: topic,
// },
//}
//sink.applyOptions(options...)
s.Node.AddChild(sink)
s.NodeBuilder.AddChildBuilder(sink)
}
func (s *kStream) Build() ([]*kStream, error) {
var streams []*kStream
t := new(topology.TopologyBuilder)
t.Source = s.source
err := s.build(s.NodeBuilder)
if err != nil {
return nil, err
}
t.SourceNodeBuilder = s.NodeBuilder
s.topology = t
streams = append(streams, s)
// Build other topologies
for _, tOther := range s.streams {
tps, err := tOther.Build()
if err != nil {
return nil, err
}
streams = append(streams, tps...)
}
return streams, nil
}
func (s *kStream) build(node topology.NodeBuilder) error {
switch nd := node.(type) {
case *join.GlobalTableJoiner:
nd.Registry = s.config.builder.storeRegistry
case *KSink:
if nd.ProducerBuilder == nil {
nd.ProducerBuilder = s.config.builder.defaultBuilders.Producer
}
nd.TopicPrefix = s.config.builder.config.ApplicationId + `_`
}
for _, nodeBuilder := range node.ChildBuilders() {
err := s.build(nodeBuilder)
if err != nil {
return err
}
}
return nil
}
type SourceNode struct {
Id int32
childs []topology.Node
childBuilders []topology.NodeBuilder
}
func (sn *SourceNode) Name() string {
panic("implement me")
}
func (sn *SourceNode) Close() {
panic("implement me")
}
func (sn *SourceNode) Childs() []topology.Node {
return sn.childs
}
func (sn *SourceNode) ChildBuilders() []topology.NodeBuilder {
return sn.childBuilders
}
func (sn *SourceNode) AddChildBuilder(builder topology.NodeBuilder) {
sn.childBuilders = append(sn.childBuilders, builder)
}
func (sn *SourceNode) AddChild(node topology.Node) {
sn.childs = append(sn.childs, node)
}
func (sn *SourceNode) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range sn.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &SourceNode{
childs: childs,
}, nil
}
func (sn *SourceNode) Next() bool {
panic("implement me")
}
func (sn *SourceNode) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error) {
for _, child := range sn.childs {
_, _, next, err := child.Run(ctx, kIn, vIn)
if err != nil || !next {
return nil, nil, false, err
}
}
return kIn, vIn, true, nil
}
func (sn *SourceNode) Type() topology.Type {
panic("implement me")
}
<|start_filename|>backend/memory/partition_memory.go<|end_filename|>
package memory
import (
"github.com/tryfix/kstream/backend"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"strconv"
"time"
)
type PartitionMemory interface {
backend.Backend
Partitions() []backend.Iterator
}
type partitionMemory struct {
partitionCount int
partitions map[int]backend.Backend
}
func NewPartitionMemoryBackend(partitions int, logger log.Logger, reporter metrics.Reporter) PartitionMemory {
partitionedBackend := &partitionMemory{
partitionCount: partitions,
partitions: make(map[int]backend.Backend),
}
for i := 0; i < partitions; i++ {
conf := NewConfig()
conf.Logger = logger
conf.MetricsReporter = reporter
backend := NewMemoryBackend(conf)
partitionedBackend.partitions[i] = backend
}
return partitionedBackend
}
func (pm *partitionMemory) Name() string {
return `partitioned_memory_backend`
}
func (pm *partitionMemory) Set(key []byte, value []byte, expiry time.Duration) error {
k, err := strconv.Atoi(string(key))
if err != nil {
return err
}
partitionId := k % pm.partitionCount
return pm.partitions[partitionId].Set(key, value, expiry)
}
func (pm *partitionMemory) Get(key []byte) ([]byte, error) {
k, err := strconv.Atoi(string(key))
if err != nil {
return nil, err
}
partitionId := k % pm.partitionCount
return pm.partitions[partitionId].Get(key)
}
func (pm *partitionMemory) RangeIterator(fromKey []byte, toKey []byte) backend.Iterator {
panic("implement me")
}
func (pm *partitionMemory) Iterator() backend.Iterator {
panic("implement me")
}
func (pm *partitionMemory) Delete(key []byte) error {
partitionId, err := strconv.Atoi(string(key))
if err != nil {
return err
}
partitionId = partitionId % pm.partitionCount
return pm.partitions[partitionId].Delete(key)
}
func (pm *partitionMemory) Destroy() error { return nil }
func (pm *partitionMemory) SetExpiry(time time.Duration) {}
func (pm *partitionMemory) String() string {
return `partition memory`
}
func (pm *partitionMemory) Persistent() bool {
return false
}
func (pm *partitionMemory) Close() error {
for i := 0; i < pm.partitionCount; i++ {
pm.partitions[i].Close()
}
return nil
}
func (pm *partitionMemory) Partitions() []backend.Iterator {
var iterators []backend.Iterator
for _, partition := range pm.partitions {
iterators = append(iterators, partition.Iterator())
}
return iterators
}
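// Usage sketch (illustrative only): keys must be numeric strings, because the
// partition is chosen with strconv.Atoi(key) % partitionCount.
//
//	pm := NewPartitionMemoryBackend(4, log.NewNoopLogger(), metrics.NoopReporter())
//	_ = pm.Set([]byte(`42`), []byte(`v1`), 0) // 42 % 4 == 2 -> partition 2
//	v, _ := pm.Get([]byte(`42`))              // routed to the same partition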
<|start_filename|>kstream/graph/graph.go<|end_filename|>
package graph
import (
"fmt"
"github.com/awalterschulze/gographviz"
"github.com/tryfix/kstream/kstream/branch"
"github.com/tryfix/kstream/kstream/processors"
"github.com/tryfix/kstream/kstream/processors/join"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/kstream/kstream/topology"
"strings"
)
type Graph struct {
parent string
vizGraph *gographviz.Graph
}
func NewGraph() *Graph {
parent := `root`
g := gographviz.NewGraph()
if err := g.SetName(parent); err != nil {
panic(err)
}
if err := g.SetDir(true); err != nil {
panic(err)
}
if err := g.AddAttr(parent, `splines`, `ortho`); err != nil {
panic(err)
}
if err := g.AddAttr(parent, `size`, `"50,12"`); err != nil {
panic(err)
}
if err := g.AddNode(parent, `kstreams`, map[string]string{
`fontcolor`: `grey100`,
`fillcolor`: `limegreen`,
`style`: `filled`,
`label`: `"KStreams"`,
}); err != nil {
panic(err)
}
if err := g.AddNode(parent, `def`, map[string]string{
`shape`: `plaintext`,
`label`: `<
<table BORDER="0" CELLBORDER="1" CELLSPACING="0">
<tr><td WIDTH="50" BGCOLOR="slateblue4"></td> <td><B>Processor Node</B></td></tr>
<tr><td WIDTH="50" BGCOLOR="deepskyblue1"></td><td><B>Global Table</B></td></tr>
<tr><td WIDTH="50" BGCOLOR="grey95"></td><td><B>Store Backend</B></td></tr>
<tr><td WIDTH="50" BGCOLOR="black"></td><td><B>Stream Branch</B></td></tr>
<tr><td WIDTH="50" BGCOLOR="limegreen"></td><td><B>Predicate</B></td></tr>
<tr><td WIDTH="50" BGCOLOR="deepskyblue1"></td><td><B>Source</B></td></tr>
<tr><td WIDTH="50" BGCOLOR="orange"></td><td><B>Sink</B></td></tr>
</table>
>`,
}); err != nil {
panic(err)
}
if err := g.AddNode(`kstreams`, `streams`, nil); err != nil {
panic(err)
}
if err := g.AddEdge(`kstreams`, `streams`, true, nil); err != nil {
panic(err)
}
return &Graph{
parent: parent,
vizGraph: g,
}
}
func (g *Graph) Root(parent string, name string, attrs map[string]string, edgeAttrs map[string]string) {
if err := g.vizGraph.AddSubGraph(g.parent, name, attrs); err != nil {
panic(err)
}
}
func (g *Graph) SubGraph(parent string, name string, attrs map[string]string, edgeAttrs map[string]string) {
if err := g.vizGraph.AddNode(parent, name, attrs); err != nil {
panic(err)
}
}
func (g *Graph) Source(parent string, name string, attrs map[string]string, edgeAttrs map[string]string) {
attrs[`color`] = `black`
attrs[`fillcolor`] = `deepskyblue1`
attrs[`style`] = `filled`
attrs[`shape`] = `oval`
if err := g.vizGraph.AddNode(parent, name, attrs); err != nil {
panic(err)
}
if err := g.vizGraph.AddEdge(parent, name, true, edgeAttrs); err != nil {
panic(err)
}
}
func (g *Graph) GTableStreams(parent string, name string, attrs map[string]string, edgeAttrs map[string]string) {
attrs[`color`] = `black`
attrs[`fillcolor`] = `deepskyblue1`
attrs[`style`] = `filled`
attrs[`shape`] = `oval`
if err := g.vizGraph.AddNode(parent, name, attrs); err != nil {
panic(err)
}
if err := g.vizGraph.AddEdge(parent, name, true, edgeAttrs); err != nil {
panic(err)
}
}
func (g *Graph) Processor(parent string, name string, attrs map[string]string, edgeAttrs map[string]string) {
attrs[`fontcolor`] = `grey100`
attrs[`fillcolor`] = `slateblue4`
attrs[`style`] = `filled`
if err := g.vizGraph.AddNode(g.parent, name, attrs); err != nil {
panic(err)
}
if parent != `` {
if err := g.vizGraph.AddEdge(parent, name, true, edgeAttrs); err != nil {
panic(err)
}
}
}
func (g *Graph) Predicate(parent string, name string, attrs map[string]string, edgeAttrs map[string]string) {
attrs[`fontcolor`] = `black`
attrs[`fillcolor`] = `olivedrab2`
attrs[`shape`] = `rectangle`
attrs[`style`] = `"rounded,filled"`
if err := g.vizGraph.AddNode(g.parent, name, attrs); err != nil {
panic(err)
}
if parent != `` {
if err := g.vizGraph.AddEdge(parent, name, true, edgeAttrs); err != nil {
panic(err)
}
}
}
func (g *Graph) Joiner(parent string, name string, store string, attrs map[string]string, edgeAttrs map[string]string) {
//attrs[`color`] = `black`
attrs[`shape`] = `plaintext`
attrs[`fontsize`] = `11`
//attrs[`style`] = `filled`
if err := g.vizGraph.AddNode(g.parent, name, attrs); err != nil {
panic(err)
}
if err := g.vizGraph.AddEdge(store, name, true, nil); err != nil {
panic(err)
}
if parent != `` {
if err := g.vizGraph.AddEdge(parent, name, true, edgeAttrs); err != nil {
panic(err)
}
}
}
func (g *Graph) StreamJoiner(leftParent string, name string, RightParent string, attrs map[string]string, edgeAttrs map[string]string) {
attrs[`color`] = `brown`
attrs[`shape`] = `square`
attrs[`fontsize`] = `11`
attrs[`style`] = `filled`
if err := g.vizGraph.AddNode(g.parent, name, attrs); err != nil {
panic(err)
}
//if err := g.vizGraph.AddEdge(store, name, true, nil); err != nil {
// panic(err)
//}
if leftParent != `` {
if err := g.vizGraph.AddEdge(leftParent, name, true, edgeAttrs); err != nil {
panic(err)
}
}
}
func (g *Graph) Edge(parent string, name string, attrs map[string]string, edgeAttrs map[string]string) {
if err := g.vizGraph.AddEdge(parent, name, true, attrs); err != nil {
panic(err)
}
}
func (g *Graph) Branch(parent string, name string, async bool, order int, attrs map[string]string, edgeAttrs map[string]string) {
attrs[`fontcolor`] = `grey100`
attrs[`fillcolor`] = `accent3`
attrs[`fontname`] = `Arial`
attrs[`fontsize`] = `14`
attrs[`style`] = `"rounded,filled"`
if err := g.vizGraph.AddNode(g.parent, name, attrs); err != nil {
panic(err)
}
if parent != `` {
edgeAttrs := map[string]string{
`style`: `dashed`,
}
edgeAttrs[`label`] = fmt.Sprintf(`< <B>%d</B> >`, order)
if async {
edgeAttrs[`label`] = `< <B>ASYNC</B> >`
}
if err := g.vizGraph.AddEdge(parent, name, true, edgeAttrs); err != nil {
panic(err)
}
}
}
func (g *Graph) Sink(parent string, name string, attrs map[string]string, edgeAttrs map[string]string) {
attrs[`color`] = `black`
attrs[`fillcolor`] = `orange`
attrs[`style`] = `filled`
attrs[`shape`] = `oval`
if err := g.vizGraph.AddNode(g.parent, name, attrs); err != nil {
panic(err)
}
if parent != `` {
if err := g.vizGraph.AddEdge(parent, name, true, edgeAttrs); err != nil {
panic(err)
}
}
}
func (g *Graph) Store(parent string, store store.Store, attrs map[string]string, edgeAttrs map[string]string) {
attrs[`shape`] = `cylinder`
attrs[`fillcolor`] = `grey95`
attrs[`style`] = `filled`
if err := g.vizGraph.AddNode(g.parent, store.Name(), attrs); err != nil {
panic(err)
}
if parent != `` {
if err := g.vizGraph.AddEdge(parent, store.Name(), true, edgeAttrs); err != nil {
panic(err)
}
}
}
func (g *Graph) RenderTopology(t *topology.TopologyBuilder) {
nName := `streams_` + fmt.Sprint(getId())
sourceName := strings.ReplaceAll(t.Source.Name(), `-`, `_`)
sourceName = strings.ReplaceAll(sourceName, `.`, `_`)
g.Source(`streams`, nName, map[string]string{
`label`: fmt.Sprintf(`"%s"`, nodeInfo(t.Source.SourceType(), sourceName, t.Source.Info())),
`shape`: `square`,
}, nil)
draw(nName, t.SourceNodeBuilder.ChildBuilders(), g)
}
func (g *Graph) Build() string {
return g.vizGraph.String()
}
func draw(parent string, builders []topology.NodeBuilder, graph *Graph) {
var nodeName string
for _, b := range builders {
switch n := b.(type) {
case *processors.Processor:
nName := n.Name() + fmt.Sprint(n.ID())
nodeName = nName
graph.Processor(parent, nName,
map[string]string{
//`label`: fmt.Sprintf(`"%s"`, n.Name()),
`label`: `"PRO"`,
`shape`: `square`,
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case topology.SinkBuilder:
//nName := parent + fmt.Sprint(n.ID())
nName := n.Name() + string(n.Type()) + fmt.Sprint(n.ID())
nName = strings.ReplaceAll(nName, `.`, `_`)
nName = strings.ReplaceAll(nName, `-`, `_`)
nodeName = nName
graph.Sink(parent, nName,
map[string]string{
`label`: fmt.Sprintf(`"%s"`, nodeInfo(n.SinkType(), n.Name(), n.Info())),
`shape`: `square`,
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *branch.Branch:
nName := string(n.Type()) + fmt.Sprint(n.ID())
nodeName = nName
graph.Predicate(parent, nName,
map[string]string{
`label`: fmt.Sprintf(`" P \n %s "`, n.Name),
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *join.GlobalTableJoiner:
nName := string(n.Type()) + fmt.Sprint(n.ID())
nodeName = nName
var typ = `INNER`
if n.Typ == join.LeftJoin {
typ = `LEFT`
}
graph.Joiner(parent, nName, n.Store,
map[string]string{
`label`: fmt.Sprintf(`< <B>%s</B> >`, typ+` JOIN`),
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *processors.Filter:
nName := n.Name() + fmt.Sprint(n.ID())
nodeName = nName
graph.Processor(parent, nName,
map[string]string{
`label`: `"F"`,
`shape`: `square`,
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *processors.Transformer:
nName := n.Name() + fmt.Sprint(n.ID())
nodeName = nName
graph.Processor(parent, nName,
map[string]string{
`label`: `"T"`,
`shape`: `square`,
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *branch.Splitter:
i := n.ID()
nName := string(n.Type()) + fmt.Sprint(i)
nodeName = nName
graph.Branch(parent, nName, false, int(i),
map[string]string{
`label`: fmt.Sprintf(`"%s"`, "BS"),
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *processors.KeySelector:
nName := string(n.Type()) + fmt.Sprint(n.ID())
nodeName = nName
graph.Processor(parent, nName,
map[string]string{
`label`: `"KS"`,
`shape`: `square`,
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *processors.ValueTransformer:
nName := string(n.Type()) + fmt.Sprint(n.ID())
nodeName = nName
graph.Processor(parent, nName,
map[string]string{
`label`: `"TV"`,
`shape`: `square`,
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *join.SideJoiner:
nName := string(n.Type()) + fmt.Sprint(n.ID())
nodeName = nName
graph.StreamJoiner(parent, nName, ``,
map[string]string{
`label`: fmt.Sprintf(`< <B>%s</B> >`, string(n.Type())+`_JOIN`),
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
case *join.StreamJoiner:
nName := string(n.Type()) + fmt.Sprint(n.ID())
nodeName = nName
graph.StreamJoiner(parent, nName, ``,
map[string]string{
`label`: fmt.Sprintf(`< <B>%s</B> >`, string(n.Type())+`_JOIN`),
},
map[string]string{
`label`: fmt.Sprintf(`"%d"`, n.ID()),
})
}
draw(nodeName, b.ChildBuilders(), graph)
}
}
var id int
func getId() int {
id += 1
return id
}
func nodeInfo(typ string, name string, info map[string]string) string {
str := fmt.Sprintf(`type:%s\nname:%s\n`, typ, name)
for p, v := range info {
str += fmt.Sprintf(`%s:%s \n`, p, v)
}
return str
}
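// Usage sketch (illustrative; `topologyBuilder` is an assumption): render a
// built topology into graphviz DOT source for inspection.
//
//	g := NewGraph()
//	g.RenderTopology(topologyBuilder) // walks the child builders recursively
//	fmt.Println(g.Build())            // prints the DOT representation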
<|start_filename|>kstream/topic_builder.go<|end_filename|>
package kstream
import (
"fmt"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/log"
)
type topicBuilder struct {
topics map[string]*admin.Topic
admin admin.KafkaAdmin
logger log.Logger
}
func (tb *topicBuilder) apply(config *admin.Topic) {
if _, ok := tb.topics[config.Name]; ok {
tb.logger.Fatal(fmt.Sprintf(`topic [%s] already exists`, config.Name))
}
tb.topics[config.Name] = config
}
func (tb *topicBuilder) build() {
if len(tb.topics) < 1 {
return
}
tb.logger.Info(`creating changelog topics...`)
if err := tb.admin.CreateTopics(tb.topics); err != nil {
tb.logger.Fatal(err)
}
}
<|start_filename|>kstream/store/store.go<|end_filename|>
package store
import (
"context"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/changelog"
kContext "github.com/tryfix/kstream/kstream/context"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/log"
"time"
)
type Builder func(name string, keyEncoder, valEncoder encoding.Builder, options ...Options) (Store, error)
type IndexedStoreBuilder func(name string, keyEncoder, valEncoder encoding.Builder, indexes []Index, options ...Options) (IndexedStore, error)
type StateStoreBuilder func(name string, keyEncoder, valEncoder encoding.Builder, options ...Options) StateStore
type Store interface {
Name() string
Backend() backend.Backend
KeyEncoder() encoding.Encoder
ValEncoder() encoding.Encoder
Set(ctx context.Context, key, value interface{}, expiry time.Duration) error
Get(ctx context.Context, key interface{}) (value interface{}, err error)
GetRange(ctx context.Context, fromKey, toKey interface{}) (map[interface{}]interface{}, error)
GetAll(ctx context.Context) (Iterator, error)
Delete(ctx context.Context, key interface{}) error
String() string
}
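// Usage sketch (illustrative; the encoders and context are assumptions, not
// part of the original source). Note that Set with a nil value acts as a
// tombstone and deletes the key.
//
//	st, _ := NewStore(`accounts`, keyEnc, valEnc, WithBackend(bk))
//	_ = st.Set(ctx, `a1`, `100`, 0) // expiry 0 -> no TTL
//	v, _ := st.Get(ctx, `a1`)       // `100`
//	_ = st.Set(ctx, `a1`, nil, 0)   // tombstone: removes the key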
type StateStore interface {
Name() string
Set(key, value interface{}) error
Get(key interface{}) (value interface{}, err error)
GetAll() ([]*data.Record, error)
}
type store struct {
backend backend.Backend
name string
logger log.Logger
keyEncoder encoding.Encoder
valEncoder encoding.Encoder
changelog changelog.Changelog
}
func NewStore(name string, keyEncoder encoding.Encoder, valEncoder encoding.Encoder, options ...Options) (Store, error) {
opts := new(storeOptions)
opts.apply(options...)
if opts.backend == nil {
bk, err := opts.backendBuilder(name)
if err != nil {
opts.logger.Fatal(`k-stream.Store.Registry`, fmt.Sprintf(`backend builder error - %+v`, err))
}
opts.backend = bk
}
store := &store{
name: name,
keyEncoder: keyEncoder,
logger: opts.logger,
valEncoder: valEncoder,
backend: opts.backend,
}
store.backend.SetExpiry(opts.expiry)
if opts.changelogEnable {
panic(`not yet implemented`)
/*p, err := producer.DefaultBuilder(&producer.Option{
Partitioner: producer.Random,
})
if err != nil {
return nil, err
}
topic := name + `_store`
cLog, err := changelog.DefaultBuilder(name, topic, -1, changelog.Producer(p))
opts.changelog = cLog
return &recoverableStore{
Store: store,
changelog: cLog,
}, nil
store.backend.SetExpiry(opts.expiry)*/
}
opts.logger.Info(
fmt.Sprintf(`default store [%s] initialized`, name))
return store, nil
}
func (s *store) Name() string {
return s.name
}
func (s *store) String() string {
return fmt.Sprintf("Backend: %s\nChangelogInfo: %s", s.Backend().Name(), s.changelog)
}
func (s *store) KeyEncoder() encoding.Encoder {
return s.keyEncoder
}
func (s *store) ValEncoder() encoding.Encoder {
return s.valEncoder
}
func (s *store) Backend() backend.Backend {
return s.backend
}
func (s *store) Set(ctx context.Context, key interface{}, value interface{}, expiry time.Duration) error {
k, err := s.keyEncoder.Encode(key)
if err != nil {
return errors.WithPrevious(err, fmt.Sprintf(`store [%s] key encode error`, s.name))
}
// if value is null remove from store (tombstone)
if value == nil {
return s.backend.Delete(k)
}
v, err := s.valEncoder.Encode(value)
if err != nil {
return errors.WithPrevious(err, fmt.Sprintf(`store [%s] value encode error`, s.name))
}
// if the changelog is enabled, write the record to the changelog
if s.changelog != nil {
record, err := kContext.RecordFromContext(ctx, k, v)
if err != nil {
return err
}
if err := s.changelog.Put(ctx, record); err != nil {
return err
}
}
return s.backend.Set(k, v, expiry)
}
func (s *store) Get(ctx context.Context, key interface{}) (value interface{}, err error) {
k, err := s.keyEncoder.Encode(key)
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`store [%s] key encode err `, s.name))
}
byt, err := s.backend.Get(k)
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`store [%s] value does not exist`, s.name))
}
if len(byt) < 1 {
return nil, nil
}
v, err := s.valEncoder.Decode(byt)
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`store [%s] value decode err `, s.name))
}
return v, nil
}
func (s *store) GetRange(ctx context.Context, fromKey interface{}, toKey interface{}) (map[interface{}]interface{}, error) {
i := s.backend.Iterator()
i.SeekToFirst()
vals := make(map[interface{}]interface{})
for i.Valid() {
if i.Error() != nil {
return nil, errors.WithPrevious(i.Error(), fmt.Sprintf(`store [%s] backend key iterator error `, s.name))
}
k, err := s.keyEncoder.Decode(i.Key())
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`store [%s] value decode err `, s.name))
}
if len(i.Value()) < 1 {
vals[k] = nil
i.Next()
continue
}
v, err := s.valEncoder.Decode(i.Value())
if err != nil {
return nil, errors.WithPrevious(err, fmt.Sprintf(`store [%s] value decode err `, s.name))
}
vals[k] = v
i.Next()
}
return vals, nil
}
func (s *store) GetAll(ctx context.Context) (Iterator, error) {
i := s.backend.Iterator()
i.SeekToFirst()
return &iterator{
iterator: i,
keyEncoder: s.keyEncoder,
valEncoder: s.valEncoder,
}, nil
}
func (s *store) Delete(ctx context.Context, key interface{}) (err error) {
k, err := s.keyEncoder.Encode(key)
if err != nil {
return errors.WithPrevious(err, fmt.Sprintf(`store [%s] key encode err `, s.name))
}
// if the changelog is enabled, delete the record from the changelog
if s.changelog != nil {
record, err := kContext.RecordFromContext(ctx, k, nil)
if err != nil {
return err
}
if err := s.changelog.Delete(ctx, record); err != nil {
return err
}
}
return s.backend.Delete(k)
}
<|start_filename|>util/struct_to_map.go<|end_filename|>
package util
import (
"fmt"
"reflect"
"runtime"
"sort"
)
type strToMap struct {
normalized map[string]string
}
func StrToMap(path string, v interface{}) [][]string {
m := strToMap{normalized: make(map[string]string)}
return m.sortAndConvert(path, v)
}
func (n *strToMap) sortAndConvert(parent string, v interface{}) [][]string {
n.split(parent, reflect.ValueOf(v))
return n.sort()
}
func (n *strToMap) sort() [][]string {
var keyVals [][]string
keys := make([]string, 0, len(n.normalized))
for k := range n.normalized {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
keyVals = append(keyVals, []string{k, n.normalized[k]})
}
return keyVals
}
func (n *strToMap) split(parent string, v reflect.Value) {
if v.Kind() == reflect.Ptr {
v = v.Elem()
}
// a nil pointer yields an invalid Value; IsZero would panic on it
if !v.IsValid() || v.IsZero() {
return
}
types := v.Type()
for i := 0; i < v.NumField(); i++ {
f := v.Field(i)
//println(f.Kind())
path := types.Field(i).Name
if !f.CanInterface() {
continue
}
if parent != `` {
path = parent + `.` + path
}
if f.Kind() == reflect.Interface && f.IsNil() {
n.normalized[path] = `<nil>`
continue
}
if !(f.Kind() == reflect.Interface && f.IsNil()) && f.CanInterface() && f.NumMethod() > 0 {
empty := reflect.Value{}
if vv := f.MethodByName(`String`); vv != empty {
n.normalized[path] = vv.Call(nil)[0].String()
continue
} else if vv := f.MethodByName(`Name`); vv != empty {
n.normalized[path] = vv.Call(nil)[0].String()
continue
}
}
if f.Kind() == reflect.Ptr && f.CanInterface() {
n.split(path, f)
continue
}
if f.Kind() == reflect.Struct {
n.split(path, f)
continue
}
if path != `` {
n.normalized[path] = n.toString(f)
}
}
}
func (n *strToMap) toString(value reflect.Value) string {
switch value.Kind() {
case reflect.Map, reflect.Array, reflect.Slice:
return fmt.Sprintf(`%+v`, value)
case reflect.Int, reflect.Int32, reflect.Int16, reflect.Int64:
return fmt.Sprintf(`%d`, value.Int())
case reflect.Bool:
return fmt.Sprint(value.Bool())
case reflect.Float64, reflect.Float32:
return fmt.Sprint(value.Float())
case reflect.Func:
return runtime.FuncForPC(value.Pointer()).Name()
default:
return value.String()
}
}
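// Usage sketch (illustrative only): StrToMap flattens a possibly nested
// struct into `{path, value}` pairs sorted by path.
//
//	type conf struct {
//		Name  string
//		Retry struct{ Count int }
//	}
//	c := conf{Name: `s1`}
//	c.Retry.Count = 3
//	for _, kv := range StrToMap(``, c) {
//		fmt.Println(kv[0], `=`, kv[1]) // Name = s1, then Retry.Count = 3
//	}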
<|start_filename|>kstream/store/recoverable_store.go<|end_filename|>
package store
import (
"context"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/changelog"
"github.com/tryfix/log"
)
type RecoverableStore interface {
Store
Recover(ctx context.Context) error
}
type recoverableStore struct {
Store
logger log.Logger
recovering bool
topic string
changelog changelog.Changelog
}
func (s *recoverableStore) Recover(ctx context.Context) error {
s.logger.Info(
fmt.Sprintf(`recovering store [%s] from its changelog...`, s.Name()))
var c int
records, err := s.changelog.ReadAll(ctx)
if err != nil {
return errors.WithPrevious(err,
fmt.Sprintf(`cannot recover data for store [%s]`, s.Name()))
}
for _, record := range records {
if err := s.Backend().Set(record.Key, record.Value, 0); err != nil {
return err
}
c++
}
s.logger.Info(
fmt.Sprintf(`[%d] records recovered for store [%s]...`, c, s.Name()))
return nil
}
func (s *recoverableStore) String() string {
return fmt.Sprintf("Backend: %s\nChangelogInfo: %s", s.Backend().Name(), s.changelog)
}
<|start_filename|>examples/example_1/stream/transaction_stream.go<|end_filename|>
package stream
import (
"github.com/tryfix/kstream/examples/example_1/encoders"
kstream "github.com/tryfix/kstream/kstream"
)
func initTransactionStream(builder *kstream.StreamBuilder) kstream.Stream {
return builder.Stream(
`transaction`,
encoders.KeyEncoder,
encoders.TransactionReceivedEncoder,
kstream.WithConfig(map[string]interface{}{
//`stream.processor.retry`: 2,
//`stream.processor.retry.interval`: 3000,
//`stream.processor.changelog`: false,
//`stream.processor.changelog.minInSyncReplicas`: 2,
//`stream.processor.changelog.replicationFactor`: 3,
//`stream.processor.changelog.buffered`: true,
//`stream.processor.changelog.BufferedSize`: 100,
}))
}
<|start_filename|>kstream/topology/source.go<|end_filename|>
package topology
import (
"context"
)
type SourceBuilder interface {
Name() string
Info() map[string]string
SourceType() string
Build() (Source, error)
}
type SinkBuilder interface {
NodeBuilder
Name() string
ID() int32
Info() map[string]string
SinkType() string
}
type Source interface {
Run(ctx context.Context, kIn, vIn []byte) (kOut, vOut interface{}, err error)
Name() string
Close()
}
type Sink interface {
Node
Name() string
Close() error
}
<|start_filename|>kstream/store/option.go<|end_filename|>
package store
import (
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/kstream/changelog"
"github.com/tryfix/log"
"time"
)
type storeOptions struct {
changelog changelog.Changelog
changelogEnable bool
backend backend.Backend
backendBuilder backend.Builder
expiry time.Duration
buffered bool
bufferSize int
compactionEnabled bool
logger log.Logger
}
type Options func(config *storeOptions)
func (c *storeOptions) apply(options ...Options) {
c.logger = log.NewNoopLogger()
for _, opt := range options {
opt(c)
}
}
func ChangelogEnabled() Options {
return func(config *storeOptions) {
config.changelogEnable = true
}
}
func WithChangelog(changelog changelog.Changelog) Options {
return func(config *storeOptions) {
config.changelog = changelog
config.changelogEnable = true
}
}
func Compacted() Options {
return func(options *storeOptions) {
options.compactionEnabled = true
}
}
func Expire(d time.Duration) Options {
return func(options *storeOptions) {
options.expiry = d
}
}
func Buffered(size int) Options {
return func(options *storeOptions) {
options.buffered = true
options.bufferSize = size
}
}
func WithBackend(backend backend.Backend) Options {
return func(config *storeOptions) {
config.backend = backend
}
}
func WithBackendBuilder(builder backend.Builder) Options {
return func(config *storeOptions) {
config.backendBuilder = builder
}
}
func WithLogger(logger log.Logger) Options {
return func(config *storeOptions) {
config.logger = logger
}
}
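// Usage sketch (illustrative only): options are applied in declaration order,
// so later options override earlier ones.
//
//	st, err := NewStore(`sessions`, keyEnc, valEnc,
//		WithBackendBuilder(bkBuilder), // backend is built from the store name
//		Expire(30*time.Minute),
//		Buffered(100),
//	)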
<|start_filename|>kstream/changelog/store_changelog.go<|end_filename|>
package changelog
func NewStoreChangelog(applicationId string, topic string, partition int32, opts ...Options) (Changelog, error) {
options := new(options)
options.apply(applicationId, opts...)
return &stateChangelog{
topic: topic,
partition: partition,
applicationId: applicationId,
options: options,
//buffer: NewBuffer(),
changelogSuffix: `_store_changelog`,
}, nil
}
<|start_filename|>backend/iterator.go<|end_filename|>
package backend
type Iterator interface {
SeekToFirst()
SeekToLast()
Seek(key []byte)
Next()
Prev()
Close()
Key() []byte
Value() []byte
Valid() bool
Error() error
}
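// Iteration protocol sketch (illustrative only): position the iterator, loop
// while Valid(), advance with Next() and check Error() on each step.
//
//	it := bk.Iterator()
//	defer it.Close()
//	for it.SeekToFirst(); it.Valid(); it.Next() {
//		if it.Error() != nil {
//			break
//		}
//		process(it.Key(), it.Value())
//	}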
<|start_filename|>examples/example_2/events/abc.go<|end_filename|>
package events
import "encoding/json"
type ABC struct {
ID string `json:"id"`
Type string `json:"type"`
AAA string `json:"aaa"`
BBB string `json:"bbb"`
CCC string `json:"ccc"`
TimestampA int64 `json:"timestamp_a"`
TimestampB int64 `json:"timestamp_b"`
TimestampC int64 `json:"timestamp_c"`
}
func (a ABC) Encode(data interface{}) ([]byte, error) {
b, err := json.Marshal(data)
if err != nil {
return nil, err
}
return b, nil
}
func (a ABC) Decode(data []byte) (interface{}, error) {
ac := ABC{}
err := json.Unmarshal(data, &ac)
if err != nil {
return nil, err
}
return ac, nil
}
<|start_filename|>examples/example_2/encoders/int64_encoder.go<|end_filename|>
package encoders
import (
"github.com/tryfix/errors"
"reflect"
"strconv"
)
type Int64Encoder struct{}
func (Int64Encoder) Encode(v interface{}) ([]byte, error) {
i, ok := v.(int64)
if !ok {
j, k := v.(int)
if !k {
return nil, errors.Errorf(`invalid type [%v] expected int64`, reflect.TypeOf(v))
}
i = int64(j)
}
/*byt := make([]byte, 4)
binary.BigEndian.PutUint32(byt, uint32(i))*/
return []byte(strconv.FormatInt(i, 10)), nil
}
func (Int64Encoder) Decode(data []byte) (interface{}, error) {
i, err := strconv.ParseInt(string(data), 10, 64)
if err != nil {
return nil, errors.WithPrevious(err, `cannot decode data`)
}
return i, nil
//return int(binary.BigEndian.Uint32(data)), nil
}
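// Round-trip sketch (illustrative only): plain int input is widened to int64,
// and both encode to the same base-10 byte form.
//
//	enc := Int64Encoder{}
//	b, _ := enc.Encode(42) // []byte(`42`)
//	v, _ := enc.Decode(b)  // int64(42)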
<|start_filename|>kstream/changelog/options.go<|end_filename|>
package changelog
import (
"github.com/tryfix/kstream/producer"
"time"
)
type options struct {
buffered bool
bufferSize int
flushInterval time.Duration
producer producer.Producer
}
type Options func(config *options)
func (c *options) apply(id string, options ...Options) error {
if err := c.applyDefaults(id); err != nil {
return err
}
for _, opt := range options {
opt(c)
}
return nil
}
func (c *options) applyDefaults(id string) error {
return nil
}
func Producer(p producer.Producer) Options {
return func(config *options) {
config.producer = p
}
}
func Buffered(size int) Options {
return func(config *options) {
config.buffered = true
config.bufferSize = size
}
}
func FlushInterval(d time.Duration) Options {
return func(config *options) {
config.flushInterval = d
}
}
<|start_filename|>data/record.go<|end_filename|>
package data
import (
"bytes"
"fmt"
"github.com/Shopify/sarama"
"github.com/google/uuid"
"time"
)
//type RecordHeaders interface {
// Read(name []byte) []byte
// All() []*sarama.RecordHeader
//}
type RecordHeaders []*sarama.RecordHeader
func (h RecordHeaders) Read(name []byte) []byte {
for _, header := range h {
if bytes.Equal(header.Key, name) {
return header.Value
}
}
return nil
}
func (h RecordHeaders) All() []*sarama.RecordHeader {
return h
}
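// Usage sketch (illustrative only): headers are matched by exact byte
// comparison of the key, and a missing header yields nil.
//
//	headers := RecordHeaders{
//		{Key: []byte(`event-type`), Value: []byte(`created`)},
//	}
//	v := headers.Read([]byte(`event-type`)) // []byte(`created`)
//	missing := headers.Read([]byte(`x`))    // nil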
type Record struct {
Key, Value []byte
Topic string
Partition int32
Offset int64
Timestamp time.Time // only set if kafka is version 0.10+, inner message timestamp
BlockTimestamp time.Time // only set if kafka is version 0.10+, outer (compressed) block timestamp
Headers RecordHeaders // only set if kafka is version 0.11+
UUID uuid.UUID
}
func (r *Record) String() string {
return fmt.Sprintf(`%s_%d_%d`, r.Topic, r.Partition, r.Offset)
}
func (r *Record) RecordKey() interface{} {
return r.Key
}
func (r *Record) RecordValue() interface{} {
return r.Value
}
<|start_filename|>producer/pool.go<|end_filename|>
package producer
import (
"context"
"github.com/tryfix/kstream/data"
"hash"
"hash/fnv"
)
type Pool struct {
NumOfWorkers int64
producers map[int64]Producer
hasher hash.Hash32
}
func NewPool(NumOfWorkers int, builder Builder) (*Pool, error) {
producers := make(map[int64]Producer)
pool := &Pool{
NumOfWorkers: int64(NumOfWorkers),
producers: producers,
hasher: fnv.New32a(),
}
for i := 0; i < NumOfWorkers; i++ {
p, err := builder(new(Config))
if err != nil {
return nil, err
}
pool.producers[int64(i)] = p
}
return pool, nil
}
func (p *Pool) Produce(ctx context.Context, message *data.Record) (partition int32, offset int64, err error) {
producer, err := p.producer(message.Key)
if err != nil {
return 0, 0, err
}
return producer.Produce(ctx, message)
}
func (p *Pool) ProduceBatch(ctx context.Context, messages []*data.Record) error {
producer, err := p.producer(messages[0].Key)
if err != nil {
return err
}
return producer.ProduceBatch(ctx, messages)
}
func (p *Pool) Close() error {
for _, producer := range p.producers {
if err := producer.Close(); err != nil {
println(err)
}
}
return nil
}
func (p *Pool) producer(key []byte) (Producer, error) {
p.hasher.Reset()
_, err := p.hasher.Write(key)
if err != nil {
return nil, err
}
w := int64(p.hasher.Sum32()) % p.NumOfWorkers
return p.producers[w], nil
}
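// Usage sketch (illustrative; the builder and record fields are assumptions):
// the pool hashes the record key with FNV-32a, so a given key always maps to
// the same underlying producer. Note the shared hasher is not synchronized,
// so concurrent Produce calls would need external locking.
//
//	pool, err := NewPool(4, builder) // builder: func(*Config) (Producer, error)
//	if err != nil {
//		log.Fatal(err)
//	}
//	partition, offset, err := pool.Produce(ctx, &data.Record{
//		Topic: `greetings`,
//		Key:   []byte(`user-1`),
//		Value: []byte(`hello`),
//	})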
<|start_filename|>kstream/offsets/mock_manager.go<|end_filename|>
package offsets
import "github.com/tryfix/kstream/admin"
type MockManager struct {
Topics *admin.Topics
}
func (m *MockManager) OffsetValid(topic string, partition int32, offset int64) (isValid bool, err error) {
oldest, err := m.GetOffsetOldest(topic, partition)
if err != nil {
return
}
latest, err := m.GetOffsetLatest(topic, partition)
if err != nil {
return
}
return offsetValid(offset, oldest, latest), nil
}
func (m *MockManager) GetOffsetLatest(topic string, partition int32) (offset int64, err error) {
tp, err := m.Topics.Topic(topic)
if err != nil {
return
}
pt, err := tp.Partition(int(partition))
if err != nil {
return
}
return pt.Latest(), nil
}
func (m *MockManager) GetOffsetOldest(topic string, partition int32) (offset int64, err error) {
return 0, nil
}
func (m *MockManager) Close() error {
return nil
}
<|start_filename|>kstream/store/indexed_store_test.go<|end_filename|>
package store
import (
"context"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/kstream/encoding"
"reflect"
"strings"
"sync"
"testing"
)
func Test_indexedStore_Delete(t *testing.T) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
i := &indexedStore{
Store: NewMockStore(`foo`, encoding.StringEncoder{}, encoding.StringEncoder{}, backend.NewMockBackend(`foo`, 0)),
indexes: map[string]Index{`foo`: index},
mu: new(sync.Mutex),
}
if err := i.Set(context.Background(), `200`, `111,222`, 0); err != nil {
t.Error(err)
}
if err := i.Set(context.Background(), `300`, `111,333`, 0); err != nil {
t.Error(err)
}
if err := i.Delete(context.Background(), `200`); err != nil {
t.Error(err)
}
data, err := index.Read(`111`)
if err != nil {
t.Error(err)
}
if !reflect.DeepEqual(data, []interface{}{`300`}) {
t.Errorf(`want []string{300}, have %#v`, data)
}
}
func Test_indexedStore_Set(t *testing.T) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
i := &indexedStore{
Store: NewMockStore(`foo`, encoding.StringEncoder{}, encoding.StringEncoder{}, backend.NewMockBackend(`foo`, 0)),
indexes: map[string]Index{`foo`: index},
mu: new(sync.Mutex),
}
if err := i.Set(context.Background(), `200`, `111,222`, 0); err != nil {
t.Error(err)
}
if err := i.Set(context.Background(), `300`, `111,333`, 0); err != nil {
t.Error(err)
}
data, err := index.Read(`111`)
if err != nil {
t.Error(err)
}
var want []interface{}
for _, r := range data {
if r.(string) == `200` || r.(string) == `300` {
want = append(want, r)
}
}
if len(want) < 2 {
t.Fail()
}
}
func TestIndexedStore_GetIndexedRecords(t *testing.T) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
i := &indexedStore{
Store: NewMockStore(`foo`, encoding.StringEncoder{}, encoding.StringEncoder{}, backend.NewMockBackend(`foo`, 0)),
indexes: map[string]Index{`foo`: index},
mu: new(sync.Mutex),
}
if err := i.Set(context.Background(), `200`, `111,222`, 0); err != nil {
t.Error(err)
}
if err := i.Set(context.Background(), `300`, `111,333`, 0); err != nil {
t.Error(err)
}
if err := i.Set(context.Background(), `400`, `222,333`, 0); err != nil {
t.Error(err)
}
data, err := i.GetIndexedRecords(context.Background(), `foo`, `111`)
if err != nil {
t.Error(err)
}
var want []interface{}
for _, r := range data {
if r.(string) == `111,222` || r.(string) == `111,333` {
want = append(want, r)
}
}
if len(want) < 2 {
t.Fail()
}
}
<|start_filename|>examples/example_1/events/customer_profile.go<|end_filename|>
package events
import (
"encoding/json"
"github.com/google/uuid"
)
type CustomerProfileUpdated struct {
ID uuid.UUID `json:"id"`
Type string `json:"type"`
Body struct {
CustomerID uuid.UUID `json:"customer_id"`
CustomerName string `json:"customer_name"`
NIC string `json:"nic"`
ContactDetails struct {
Phone string `json:"phone"`
Email string `json:"email"`
Address string `json:"address"`
} `json:"contact_details"`
DateOfBirth string `json:"date_of_birth"`
UpdatedAt int64 `json:"updated_at"`
} `json:"body"`
Timestamp int64 `json:"timestamp"`
}
func (c CustomerProfileUpdated) Encode(data interface{}) ([]byte, error) {
b, err := json.Marshal(data)
if err != nil {
return nil, err
}
return b, nil
}
func (c CustomerProfileUpdated) Decode(data []byte) (interface{}, error) {
cp := CustomerProfileUpdated{}
err := json.Unmarshal(data, &cp)
if err != nil {
return nil, err
}
return cp, nil
}
<|start_filename|>kstream/store/indexed_store_bench_test.go<|end_filename|>
package store
import (
"context"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/backend/memory"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"math/rand"
"strconv"
"strings"
"sync"
"testing"
)
func BenchmarkIndexedStore_Set(b *testing.B) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
i := &indexedStore{
Store: NewMockStore(`foo`, encoding.StringEncoder{}, encoding.StringEncoder{}, backend.NewMockBackend(`foo`, 0)),
indexes: map[string]Index{`foo`: index},
mu: new(sync.Mutex),
}
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := i.Set(context.Background(), strconv.Itoa(rand.Intn(99999)+1), `111,222`, 0); err != nil {
b.Error(err)
}
}
})
}
func BenchmarkIndexedStore_GetIndexedRecords(b *testing.B) {
indexedStore := NewMockStore(`foo`, encoding.StringEncoder{}, encoding.StringEncoder{}, backend.NewMockBackend(`foo`, 0))
for i := 1; i < 99909; i++ {
compKey := strconv.Itoa(rand.Intn(4)+1) + `:` + strconv.Itoa(i)
if err := indexedStore.Set(context.Background(), strconv.Itoa(i), compKey, 0); err != nil {
b.Error(err)
}
}
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `:`)[0]
})
conf := memory.NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
st, err := NewIndexedStore(
`foo`,
encoding.StringEncoder{},
encoding.StringEncoder{},
[]Index{index},
WithBackend(memory.NewMemoryBackend(conf)))
if err != nil {
b.Error(err)
}
for i := 1; i < 99909; i++ {
compKey := strconv.Itoa(rand.Intn(4)+1) + `:` + strconv.Itoa(i)
if err := st.Set(context.Background(), strconv.Itoa(i), compKey, 0); err != nil {
b.Error(err)
}
}
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := st.GetIndexedRecords(context.Background(), `foo`, strconv.Itoa(rand.Intn(4)+1)); err != nil {
b.Error(err)
}
}
})
}
<|start_filename|>backend/mock_backend.go<|end_filename|>
package backend
import (
"sync"
"time"
)
type MockBackend struct {
name string
data map[string][]byte
mu *sync.Mutex
expiry time.Duration
}
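// NewMockBackend creates an in-memory Backend for testing. When expiry is
// greater than zero it is used as the default TTL for keys written without
// an explicit expiry.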
func NewMockBackend(name string, expiry time.Duration) Backend {
b := &MockBackend{
name: name,
data: make(map[string][]byte),
mu: new(sync.Mutex),
}
if expiry > 0 {
b.expiry = expiry
}
return b
}
func (b *MockBackend) Name() string {
return b.name
}
func (b *MockBackend) Persistent() bool {
return false
}
func (b *MockBackend) Set(key []byte, value []byte, expiry time.Duration) error {
b.mu.Lock()
defer b.mu.Unlock()
var del = func(after time.Duration) {
time.Sleep(after)
b.Delete(key)
}
// fall back to the backend's default expiry when none is given
if expiry > 0 {
go del(expiry)
} else if b.expiry > 0 {
go del(b.expiry)
}
b.data[string(key)] = value
return nil
}
func (b *MockBackend) Get(key []byte) ([]byte, error) {
b.mu.Lock()
defer b.mu.Unlock()
v, ok := b.data[string(key)]
if !ok {
return nil, nil
}
return v, nil
}
func (b *MockBackend) RangeIterator(fromKey []byte, toKey []byte) Iterator {
panic("implement me")
}
func (*MockBackend) Iterator() Iterator {
panic("implement me")
}
func (b *MockBackend) Delete(key []byte) error {
b.mu.Lock()
defer b.mu.Unlock()
delete(b.data, string(key))
return nil
}
func (b *MockBackend) SetExpiry(expiry time.Duration) {
b.expiry = expiry
}
func (b *MockBackend) String() string {
return b.name
}
func (*MockBackend) Close() error {
panic("implement me")
}
func (*MockBackend) Destroy() error {
panic("implement me")
}
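// Usage sketch (a hedged example, not part of the original source; assumes
// only the APIs defined in this file):
//
//  b := NewMockBackend(`test`, 0)
//  _ = b.Set([]byte(`k`), []byte(`v`), time.Second)
//  v, _ := b.Get([]byte(`k`)) // v == []byte(`v`) until the TTL fires
//  _ = b.Delete([]byte(`k`))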
<|start_filename|>kstream/processors/join/window.go<|end_filename|>
package join
import "sync"
type Window struct {
l *sync.Mutex
window map[interface{}]interface{}
}
func NewWindow() *Window {
return &Window{
l: new(sync.Mutex),
window: make(map[interface{}]interface{}),
}
}
func (w *Window) Write(key, value interface{}) {
w.l.Lock()
defer w.l.Unlock()
w.window[key] = value
}
func (w *Window) Read(key interface{}) (interface{}, bool) {
w.l.Lock()
defer w.l.Unlock()
v, ok := w.window[key]
return v, ok
}
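// Usage sketch (a hedged example; assumes only the methods above):
//
//  w := NewWindow()
//  w.Write(`k`, 1)
//  v, ok := w.Read(`k`) // v == 1, ok == true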
<|start_filename|>kstream/streams.go<|end_filename|>
package kstream
import (
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"sync"
)
type StreamInstance struct {
id string
streams map[string]*kStream // topic:topology
topics []string
processorPool *processorPool
numOfParallelConsumers int
stopping chan bool
stopped chan bool
logger log.Logger
consumer consumer.Builder
builder *StreamBuilder
instancesOptions *instancesOptions
reBalanceHandler consumer.ReBalanceHandler
}
type instancesOptions struct {
notifyOnSynced chan bool
reBalanceHandler consumer.ReBalanceHandler
consumerOptions []consumer.Option
}
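// NotifyOnStart registers a channel which receives true once the global
// tables are synced and the stream instances have been started.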
func NotifyOnStart(c chan bool) InstancesOptions {
return func(config *instancesOptions) {
config.notifyOnSynced = c
}
}
func WithReBalanceHandler(h consumer.ReBalanceHandler) InstancesOptions {
return func(config *instancesOptions) {
config.reBalanceHandler = h
}
}
func WithConsumerOptions(opt consumer.Option) InstancesOptions {
return func(config *instancesOptions) {
config.consumerOptions = append(config.consumerOptions, opt)
}
}
type InstancesOptions func(config *instancesOptions)
func (iOpts *instancesOptions) apply(options ...InstancesOptions) {
for _, o := range options {
o(iOpts)
}
}
type Instances struct {
streams map[int]*StreamInstance
globalTables map[string]*globalKTable
globalTableStream *globalTableStream
options *instancesOptions
logger log.Logger
builder *StreamBuilder
metricsReporter metrics.Reporter
}
func NewStreams(builder *StreamBuilder, options ...InstancesOptions) *Instances {
opts := new(instancesOptions)
opts.apply(options...)
instances := &Instances{
streams: make(map[int]*StreamInstance),
globalTables: builder.globalTables,
options: opts,
builder: builder,
metricsReporter: builder.metricsReporter,
logger: builder.logger.NewLog(log.Prefixed(`streams`)),
}
if len(builder.streams) < 1 {
return instances
}
for i := 1; i <= builder.config.ConsumerCount; i++ {
id := fmt.Sprintf(`streams-%d`, i)
logger := builder.logger.NewLog(log.Prefixed(id))
instance := &StreamInstance{
id: id,
streams: builder.streams,
processorPool: newProcessorPool(id, builder.streams, builder.defaultBuilders.changelog, logger, builder.metricsReporter),
numOfParallelConsumers: builder.config.ConsumerCount,
stopping: make(chan bool, 1),
stopped: make(chan bool, 1),
logger: logger,
consumer: builder.defaultBuilders.Consumer,
builder: builder,
instancesOptions: opts,
}
if opts.reBalanceHandler != nil {
instance.reBalanceHandler = opts.reBalanceHandler
}
for t := range builder.streams {
instance.topics = append(instance.topics, t)
}
instances.streams[i] = instance
}
return instances
}
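// Start starts the global table streams (if any) and then the stream
// instances. It blocks until all instances have stopped.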
func (ins *Instances) Start() (err error) {
defer ins.logger.Info(`k-stream shutdown completed`)
/*var syncableReplicas []consumer.TopicPartition
// start changelog replica syncing
for _, stream := range ins.builder.streams {
if stream.changelog.enabled && stream.changelog.replicated {
for i := 0; i <= stream.changelog.topic.numOfPartitions-1; i++ {
// already allocated partition doesnt need a replica syncer
syncableReplicas = append(syncableReplicas, consumer.TopicPartition{
Topic: stream.changelog.topic.name, Partition: int32(i),
})
}
}
}
if len(syncableReplicas) > 0 {
if err := ins.builder.changelogReplicaManager.StartReplicas(syncableReplicas); err != nil {
ins.logger.Fatal( err)
}
}*/
wg := new(sync.WaitGroup)
// start global table streams
if len(ins.globalTables) > 0 {
//wg.Add(1)
ins.globalTableStream, err = newGlobalTableStream(ins.globalTables, &GlobalTableStreamConfig{
ConsumerBuilder: ins.builder.defaultBuilders.PartitionConsumer,
Logger: ins.logger,
KafkaAdmin: ins.builder.defaultBuilders.KafkaAdmin,
OffsetManager: ins.builder.defaultBuilders.OffsetManager,
Metrics: ins.metricsReporter,
BackendBuilder: ins.builder.defaultBuilders.Backend,
})
if err != nil {
return errors.WithPrevious(err, `global table stream start failed`)
}
ins.globalTableStream.StartStreams(wg)
if len(ins.streams) < 1 {
if ins.options.notifyOnSynced != nil {
ins.options.notifyOnSynced <- true
}
wg.Wait()
return nil
}
}
for _, instance := range ins.streams {
// start stream consumer
wg.Add(1)
go func(wg *sync.WaitGroup, i *StreamInstance) {
if err := i.Start(wg); err != nil {
ins.logger.Fatal(err)
}
}(wg, instance)
}
if ins.options.notifyOnSynced != nil {
ins.options.notifyOnSynced <- true
}
wg.Wait()
return nil
}
// Stop stops all the running Streams Instances and then GlobalTables
func (ins *Instances) Stop() {
if len(ins.streams) > 0 {
// stop all the streams first
wg := &sync.WaitGroup{}
for _, instance := range ins.streams {
wg.Add(1)
go func(w *sync.WaitGroup, i *StreamInstance) {
i.Stop()
w.Done()
}(wg, instance)
}
wg.Wait()
}
// stop running global tables
if len(ins.globalTables) > 0 {
ins.globalTableStream.stop()
}
}
// starts the high level consumer for all streams
func (s *StreamInstance) Start(wg *sync.WaitGroup) error {
defer wg.Done()
/*config.OnRebalanced = func(allocation consumer.Allocation) {
// consumer booting logic
if len(allocation.Assigned) > 0 {
logger.DefaultLogger.Warn( `partitions added`, allocation.Assigned)
for _, tp := range allocation.Assigned {
// stop started replicas
if s.streams[tp.Topic].changelog.replicated {
if err := s.builder.changelogReplicaManager.StopReplicas([]consumer.TopicPartition{
{Topic: s.streams[tp.Topic].changelog.topic.name, Partition: tp.Partition},
}); err != nil {
logger.DefaultLogger.Error( err)
}
}
if err := s.processorPool.addProcessor(tp); err != nil {
logger.DefaultLogger.Fatal( `allocation failed due to `, err)
}
pro := s.processorPool.Processor(tp)
if err := pro.boot(); err != nil {
logger.DefaultLogger.Fatal(`k-stream.consumer`,
fmt.Sprintf("cannot boot processor due to : %+v", err))
}
}
}
if len(allocation.Removed) > 0 {
logger.DefaultLogger.Warn( `partitions removed`, allocation.Removed)
// start changelog replica syncers
var syncableReplicas []consumer.TopicPartition
for _, tp := range allocation.Removed {
if s.streams[tp.Topic].changelog.replicated {
syncableReplicas = append(syncableReplicas, consumer.TopicPartition{
Topic: s.streams[tp.Topic].changelog.topic.name, Partition: tp.Partition,
})
}
}
if len(syncableReplicas) > 0 {
if err := s.builder.changelogReplicaManager.StartReplicas(syncableReplicas); err != nil {
logger.DefaultLogger.Error( fmt.Sprintf(`cannot start replicas due to %s`, err))
}
}
}
go func() {
if s.allocationNotify != nil {
s.allocationNotify <- allocation
}
}()
}*/
c, err := s.consumer.Build(
consumer.BuilderWithId(fmt.Sprintf(`group_consumer_%s`, s.id)),
consumer.BuilderWithOptions(s.instancesOptions.consumerOptions...),
)
if err != nil {
return errors.WithPrevious(err, `cannot initiate consumer`)
}
reBalancer := &reBalanceHandler{
userHandler: s.reBalanceHandler,
logger: s.logger,
processors: s.processorPool,
builder: s.builder,
}
partitionConsumers, err := c.Consume(s.topics, reBalancer)
if err != nil {
return errors.WithPrevious(err, `cannot start consumer`)
}
consumerWg := new(sync.WaitGroup)
consumerWg.Add(1)
go func(consumerWg *sync.WaitGroup) {
<-s.stopping
if err := c.Close(); err != nil {
s.logger.Fatal(err)
}
consumerWg.Done()
s.stopped <- true
}(consumerWg)
go func() {
for err := range c.Errors() {
s.logger.Info(err)
}
}()
wgx := new(sync.WaitGroup)
for partition := range partitionConsumers {
wgx.Add(1)
go func(wg *sync.WaitGroup, p consumer.Partition) {
pro := s.processorPool.Processor(p.Partition())
go func(processor *processor) {
for record := range processor.changelogMarks {
if err := p.CommitOffset(record); err != nil {
s.logger.Error(fmt.Sprintf("cannot commit partition offset due to : %+v", err))
}
}
}(pro)
pro.records = p.Records()
pro.start()
wg.Done()
}(wgx, partition)
}
wgx.Wait()
consumerWg.Wait()
return nil
}
func (s *StreamInstance) Stop() {
s.stopping <- true
<-s.stopped
s.logger.Info(`instance stopped`)
}
<|start_filename|>examples/example_1/events/message.go<|end_filename|>
package events
import (
"encoding/json"
"github.com/google/uuid"
)
type MessageCreated struct {
ID string `json:"id"`
Type string `json:"type"`
Body struct {
CustomerID uuid.UUID `json:"customer_id"`
Text string `json:"text"`
Phone string `json:"phone"`
Email string `json:"email"`
Address string `json:"address"`
} `json:"body"`
Timestamp int64 `json:"timestamp"`
}
func (m MessageCreated) Encode(data interface{}) ([]byte, error) {
b, err := json.Marshal(data)
if err != nil {
return nil, err
}
return b, nil
}
func (m MessageCreated) Decode(data []byte) (interface{}, error) {
mc := MessageCreated{}
err := json.Unmarshal(data, &mc)
if err != nil {
return nil, err
}
return mc, nil
}
<|start_filename|>admin/mock_topics.go<|end_filename|>
package admin
import (
"errors"
"github.com/Shopify/sarama"
"github.com/tryfix/kstream/data"
"sync"
)
type MockPartition struct {
records []*data.Record
*sync.Mutex
}
func (p *MockPartition) Append(r *data.Record) error {
p.Lock()
defer p.Unlock()
if len(p.records) > 0 {
r.Offset = int64(len(p.records))
}
//println(`appended`, r.Partition, r.Offset)
p.records = append(p.records, r)
return nil
}
func (p *MockPartition) Latest() int64 {
p.Lock()
defer p.Unlock()
if len(p.records) < 1 {
return 0
}
return p.records[len(p.records)-1].Offset
}
func (p *MockPartition) FetchAll() (records []*data.Record) {
p.Lock()
defer p.Unlock()
return p.records
}
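// Fetch returns up to limit records beginning at the given start offset.
// Following the sarama convention, start == -1 reads from the latest offset
// and start == -2 from the oldest.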
func (p *MockPartition) Fetch(start int64, limit int) (records []*data.Record, err error) {
p.Lock()
defer p.Unlock()
if len(p.records) < 1 {
return
}
if start == -1 /* latest offset */ {
// get latest record
start = int64(len(p.records))
}
if start == -2 /* oldest offset */ {
start = 0
}
if start > int64(len(p.records)) {
return
//return nil, sarama.ErrOffsetOutOfRange
}
from := start
to := int(start) + limit
if to > len(p.records) {
to = len(p.records)
}
chunk := p.records[from:to]
//println(`from`, from, `to`, to, `recs`, len(chunk), `ttt`, len(p.records))
var count int
for _, rec := range chunk {
if count == limit {
break
}
records = append(records, rec)
count++
}
return
}
type MockTopic struct {
Name string
partitions []*MockPartition
Meta *Topic
mu *sync.Mutex
}
func (tp *MockTopic) AddPartition(id int) error {
tp.mu.Lock()
defer tp.mu.Unlock()
tp.partitions[id] = &MockPartition{
records: make([]*data.Record, 0),
Mutex: new(sync.Mutex),
}
return nil
}
func (tp *MockTopic) Partition(id int) (*MockPartition, error) {
tp.mu.Lock()
defer tp.mu.Unlock()
return tp.partitions[id], nil
}
func (tp *MockTopic) Partitions() []*MockPartition {
tp.mu.Lock()
defer tp.mu.Unlock()
return tp.partitions
}
type Topics struct {
*sync.Mutex
topics map[string]*MockTopic
}
func NewMockTopics() *Topics {
return &Topics{
topics: make(map[string]*MockTopic),
Mutex: new(sync.Mutex),
}
}
func (td *Topics) AddTopic(topic *MockTopic) error {
td.Lock()
defer td.Unlock()
_, ok := td.topics[topic.Name]
if ok {
return errors.New(`topic already exists`)
}
topic.mu = new(sync.Mutex)
topic.partitions = make([]*MockPartition, topic.Meta.NumPartitions)
for i := int32(0); i < topic.Meta.NumPartitions; i++ {
topic.Meta.Partitions = append(topic.Meta.Partitions, Partition{
Id: i,
Error: nil,
})
if err := topic.AddPartition(int(i)); err != nil {
return err
}
}
td.topics[topic.Name] = topic
return nil
}
func (td *Topics) RemoveTopic(name string) error {
td.Lock()
defer td.Unlock()
if _, ok := td.topics[name]; !ok {
return errors.New(`topic does not exist`)
}
delete(td.topics, name)
return nil
}
func (td *Topics) Topic(name string) (*MockTopic, error) {
td.Lock()
defer td.Unlock()
t, ok := td.topics[name]
if !ok {
return t, sarama.ErrUnknownTopicOrPartition
}
return t, nil
}
func (td *Topics) Topics() map[string]*MockTopic {
td.Lock()
defer td.Unlock()
return td.topics
}
func (tp *MockTopic) FetchAll() (records []*data.Record) {
tp.mu.Lock()
defer tp.mu.Unlock()
rec := make([]*data.Record, 0)
for _, pt := range tp.partitions {
rec = append(rec, pt.FetchAll()...)
}
return rec
}
<|start_filename|>kstream/processors/filter_test.go<|end_filename|>
package processors
import (
"context"
"errors"
"testing"
)
var f FilterFunc = func(ctx context.Context, key, value interface{}) (b bool, err error) {
k, ok := key.(int)
if !ok {
return false, errors.New(`invalid type`)
}
return k == 1, nil
}
var filter = &Filter{
FilterFunc: f,
}
func TestFilter_Process_Should_Filter(t *testing.T) {
k, v, next, err := filter.Run(context.Background(), 1, nil)
if err != nil {
t.Error(err)
}
if !next {
t.Fail()
}
if k != 1 {
t.Fail()
}
if v != nil {
t.Fail()
}
}
func TestFilter_Process_Should_Return_Org_Vals_On_Error(t *testing.T) {
kOrg := `100`
vOrg := `100`
k, v, next, err := filter.Run(context.Background(), kOrg, vOrg)
if err == nil {
t.Fail()
}
if next {
t.Fail()
}
if k != nil {
t.Fail()
}
if v != nil {
t.Fail()
}
}
<|start_filename|>backend/memory/memory.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package memory
import (
"github.com/tryfix/kstream/backend"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"sync"
"time"
)
type memoryRecord struct {
key []byte
value []byte
createdAt time.Time
expiry time.Duration
}
type config struct {
ExpiredRecordCleanupInterval time.Duration
MetricsReporter metrics.Reporter
Logger log.Logger
}
func NewConfig() *config {
conf := new(config)
conf.parse()
return conf
}
func (c *config) parse() {
if c.ExpiredRecordCleanupInterval == time.Duration(0) {
c.ExpiredRecordCleanupInterval = time.Second
}
if c.Logger == nil {
c.Logger = log.NewNoopLogger()
}
if c.MetricsReporter == nil {
c.MetricsReporter = metrics.NoopReporter()
}
}
type memory struct {
expiredRecordCleanupInterval time.Duration
records *sync.Map
logger log.Logger
metrics struct {
readLatency metrics.Observer
updateLatency metrics.Observer
deleteLatency metrics.Observer
storageSize metrics.Gauge
}
}
func Builder(config *config) backend.Builder {
return func(name string) (backend backend.Backend, err error) {
return NewMemoryBackend(config), nil
}
}
func NewMemoryBackend(config *config) backend.Backend {
m := &memory{
expiredRecordCleanupInterval: config.ExpiredRecordCleanupInterval,
logger: config.Logger,
records: new(sync.Map),
}
labels := []string{`name`, `type`}
m.metrics.readLatency = config.MetricsReporter.Observer(metrics.MetricConf{Path: `backend_read_latency_microseconds`, Labels: labels})
m.metrics.updateLatency = config.MetricsReporter.Observer(metrics.MetricConf{Path: `backend_update_latency_microseconds`, Labels: labels})
m.metrics.storageSize = config.MetricsReporter.Gauge(metrics.MetricConf{Path: `backend_storage_size`, Labels: labels})
m.metrics.deleteLatency = config.MetricsReporter.Observer(metrics.MetricConf{Path: `backend_delete_latency_microseconds`, Labels: labels})
go m.runCleaner()
return m
}
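// runCleaner periodically takes a snapshot of the stored records and deletes
// the ones whose TTL has elapsed.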
func (m *memory) runCleaner() {
ticker := time.NewTicker(m.expiredRecordCleanupInterval)
for range ticker.C {
records := m.snapshot()
for _, record := range records {
if record.expiry > 0 && time.Since(record.createdAt).Nanoseconds() > record.expiry.Nanoseconds() {
if err := m.Delete(record.key); err != nil {
m.logger.Error(err)
}
}
}
}
}
func (m *memory) snapshot() []memoryRecord {
records := make([]memoryRecord, 0)
m.records.Range(func(key, value interface{}) bool {
records = append(records, value.(memoryRecord))
return true
})
return records
}
func (m *memory) Name() string {
return `memory`
}
func (m *memory) String() string {
return `memory`
}
func (m *memory) Persistent() bool {
return false
}
func (m *memory) Set(key []byte, value []byte, expiry time.Duration) error {
defer func(begin time.Time) {
m.metrics.updateLatency.Observe(float64(time.Since(begin).Nanoseconds()/1e3), map[string]string{`name`: m.Name(), `type`: `memory`})
}(time.Now())
record := memoryRecord{
key: key,
value: value,
expiry: expiry,
createdAt: time.Now(),
}
m.records.Store(string(key), record)
return nil
}
func (m *memory) Get(key []byte) ([]byte, error) {
defer func(begin time.Time) {
m.metrics.readLatency.Observe(float64(time.Since(begin).Nanoseconds()/1e3), map[string]string{`name`: m.Name(), `type`: `memory`})
}(time.Now())
record, ok := m.records.Load(string(key))
if !ok {
return nil, nil
}
return record.(memoryRecord).value, nil
}
func (m *memory) RangeIterator(fromKey []byte, toKey []byte) backend.Iterator {
panic("implement me")
}
func (m *memory) Iterator() backend.Iterator {
records := m.snapshot()
return &Iterator{
records: records,
valid: len(records) > 0,
}
}
func (m *memory) Delete(key []byte) error {
defer func(begin time.Time) {
m.metrics.deleteLatency.Observe(float64(time.Since(begin).Nanoseconds()/1e3), map[string]string{`name`: m.Name(), `type`: `memory`})
}(time.Now())
m.records.Delete(string(key))
return nil
}
func (m *memory) Destroy() error { return nil }
func (m *memory) SetExpiry(time.Duration) {}
func (m *memory) reportMetricsSize() {}
func (m *memory) Close() error {
m.records = nil
return nil
}
type Iterator struct {
records []memoryRecord
currentKey int
valid bool
}
func (i *Iterator) SeekToFirst() {
i.currentKey = 0
}
func (i *Iterator) SeekToLast() {
i.currentKey = len(i.records) - 1
}
func (i *Iterator) Seek(key []byte) {
for idx, r := range i.records {
if string(r.key) == string(key) {
i.currentKey = idx
}
}
}
func (i *Iterator) Next() {
if i.currentKey == len(i.records)-1 {
i.valid = false
return
}
i.currentKey += 1
}
func (i *Iterator) Prev() {
if i.currentKey == 0 {
i.valid = false
return
}
i.currentKey -= 1
}
func (i *Iterator) Close() {
i.records = nil
}
func (i *Iterator) Key() []byte {
return i.records[i.currentKey].key
}
func (i *Iterator) Value() []byte {
return i.records[i.currentKey].value
}
func (i *Iterator) Valid() bool {
return i.valid
}
func (i *Iterator) Error() error {
return nil
}
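// Usage sketch (a hedged example, not part of the original source; assumes
// only the APIs defined in this file):
//
//  conf := NewConfig()
//  b := NewMemoryBackend(conf)
//  _ = b.Set([]byte(`k`), []byte(`v`), time.Second) // expires after 1s
//  v, _ := b.Get([]byte(`k`))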
<|start_filename|>kstream/processors/transformer_test.go<|end_filename|>
package processors
import (
"context"
"errors"
"testing"
)
var tr TransFunc = func(ctx context.Context, key interface{}, value interface{}) (interface{}, interface{}, error) {
k, ok := key.(int)
if !ok {
return nil, nil, errors.New(`invalid key`)
}
v, ok := value.(string)
if !ok {
return nil, nil, errors.New(`invalid value`)
}
k *= 10
v += `test`
return k, v, nil
}
var transformer = Transformer{
TransFunc: tr,
}
func TestTransformer_Process_Should_Transform(t *testing.T) {
k, v, _, err := transformer.Run(context.Background(), 1, `1`)
if err != nil {
t.Fail()
}
if k != 10 {
t.Fail()
}
if v != `1test` {
t.Fail()
}
}
func TestTransformer_Process_Should_Not_Transform_On_Error(t *testing.T) {
keyOrg := `10`
valOrg := 10
k, v, _, err := transformer.Run(context.Background(), keyOrg, valOrg)
if err == nil {
t.Fail()
}
if k != nil || v != nil {
t.Fail()
}
}
<|start_filename|>examples/example_1/stream/account_debited.go<|end_filename|>
package stream
import (
"context"
"fmt"
"github.com/google/uuid"
"github.com/tryfix/kstream/examples/example_1/events"
"github.com/tryfix/kstream/kstream"
"github.com/tryfix/kstream/kstream/branch"
"github.com/tryfix/kstream/kstream/encoding"
"time"
)
type AccountDebited struct {
Upstream kstream.Stream
AccountDetailTable kstream.GlobalTable
CustomerProfileTable kstream.GlobalTable
KeyEncoder func() encoding.Encoder
MessageEncoder func() encoding.Encoder
}
func (ad AccountDebited) Init() {
accountDebitedBranches := ad.Upstream.Branch([]branch.Details{{Name: `account_debited`, Predicate: func(ctx context.Context, key interface{}, val interface{}) (b bool, e error) {
_, ok := val.(events.AccountDebited)
return ok, nil
}}})
accountDebitedBranch := accountDebitedBranches[0]
filteredAccountDebited := accountDebitedBranch.Filter(ad.filterFromTimestamp)
joinedDebitedAccountDetails := filteredAccountDebited.JoinGlobalTable(ad.AccountDetailTable, ad.accountDebitedAccountDetailsKeyMapping, ad.accountDebitedAccountDetailsMapping, 1) // 1 for inner join
joinedDebitedCustomerProfile := joinedDebitedAccountDetails.JoinGlobalTable(ad.CustomerProfileTable, ad.accountDebitedMessageCustomerProfileKeyMapping, ad.accountMessageCustomerProfileDetailsMapping, 1)
joinedDebitedCustomerProfile.To(`message`, ad.KeyEncoder, ad.MessageEncoder)
}
func (ad AccountDebited) filterFromTimestamp(ctx context.Context, key, value interface{}) (b bool, e error) {
accDebited, _ := value.(events.AccountDebited)
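// drop account_debited events older than five minutes (300000 ms)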
if time.Now().UnixNano()/1e6-accDebited.Timestamp > 300000 {
return false, nil
}
return true, nil
}
func (ad AccountDebited) accountDebitedAccountDetailsKeyMapping(key interface{}, value interface{}) (mappedKey interface{}, err error) {
accDebited, _ := value.(events.AccountDebited)
return accDebited.Body.AccountNo, nil
}
func (ad AccountDebited) accountDebitedAccountDetailsMapping(left interface{}, right interface{}) (joined interface{}, err error) {
l, _ := left.(events.AccountDebited)
r, _ := right.(events.AccountDetailsUpdated)
dateTime := time.Unix(l.Body.DebitedAt, 0).Format(time.RFC1123)
text := fmt.Sprintf(`Your a/c %d is debited with %v USD on %v at %v`, l.Body.AccountNo, l.Body.Amount, dateTime, l.Body.Location)
message := events.MessageCreated{
ID: uuid.New().String(),
Type: "message_created",
Timestamp: time.Now().UnixNano() / 1e6,
}
message.Body.CustomerID = r.Body.CustomerID
message.Body.Text = text
return message, nil
}
func (ad AccountDebited) accountDebitedMessageCustomerProfileKeyMapping(key interface{}, value interface{}) (mappedKey interface{}, err error) {
message, _ := value.(events.MessageCreated)
return message.Body.CustomerID, nil
}
func (ad AccountDebited) accountMessageCustomerProfileDetailsMapping(left interface{}, right interface{}) (joined interface{}, err error) {
l, _ := left.(events.MessageCreated)
r, _ := right.(events.CustomerProfileUpdated)
l.Body.Address = r.Body.ContactDetails.Address
l.Body.Phone = r.Body.ContactDetails.Phone
l.Body.Email = r.Body.ContactDetails.Email
return l, nil
}
<|start_filename|>kstream/store/state_store.go<|end_filename|>
package store
import (
"github.com/tryfix/errors"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/encoding"
)
type stateStore struct {
name string
options *storeOptions
backend backend.Backend
keyEncoder encoding.Encoder
valEncoder encoding.Encoder
}
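// NewStateStore creates a StateStore backed by the backend configured via
// the given Options.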
func NewStateStore(name string, keyEncoder encoding.Encoder, valEncoder encoding.Encoder, options ...Options) StateStore {
configs := storeOptions{}
configs.apply(options...)
return &stateStore{
name: name,
keyEncoder: keyEncoder,
valEncoder: valEncoder,
options: &configs,
backend: configs.backend,
}
}
func (s *stateStore) Name() string {
return s.name
}
func (s *stateStore) Set(key interface{}, value interface{}) error {
k, err := s.keyEncoder.Encode(key)
if err != nil {
return errors.WithPrevious(err, `key encode err `)
}
v, err := s.valEncoder.Encode(value)
if err != nil {
return errors.WithPrevious(err, `value encode err `)
}
return s.backend.Set(k, v, 0)
}
func (s *stateStore) Get(key interface{}) (value interface{}, err error) {
k, err := s.keyEncoder.Encode(key)
if err != nil {
return nil, errors.WithPrevious(err, `key encode err `)
}
byts, err := s.options.backend.Get(k)
if err != nil {
return nil, errors.WithPrevious(err, `backend get err `)
}
v, err := s.valEncoder.Decode(byts)
if err != nil {
return nil, errors.WithPrevious(err, `value decode err `)
}
return v, nil
}
func (s *stateStore) GetAll() ([]*data.Record, error) {
panic("implement me")
}
<|start_filename|>kstream/dlq/dlq.go<|end_filename|>
package dlq
import (
"context"
"errors"
"fmt"
"github.com/tryfix/kstream/data"
kContext "github.com/tryfix/kstream/kstream/context"
"github.com/tryfix/kstream/producer"
)
type DqlType int
const DqlGlobal DqlType = 1
const DqlPerTopic DqlType = 2
type DLQ interface {
Publish(ctx context.Context, record *data.Record) error
}
type Builder func() (DLQ, error)
type dlq struct {
producer producer.Producer
options *Options
}
type Options struct {
BootstrapServers []string
Topic string
TopicFormat string
Type DqlType
Producer producer.Producer
}
func NewDLQ(options *Options) (DLQ, error) {
/*p, err := producer.DefaultBuilder(&producer.Options{
BootstrapServers: options.BootstrapServers,
Partitioner: producer.Random,
})
if err != nil {
return nil, err
}*/
return &dlq{
options: options,
//producer: p,
}, nil
}
func (dq *dlq) Publish(ctx context.Context, record *data.Record) error {
if _, _, err := dq.producer.Produce(ctx, record); err != nil {
return err
}
return nil
}
func (dq *dlq) prepareMessage(ctx context.Context, key []byte, value []byte) (*data.Record, error) {
kCtx, ok := ctx.(*kContext.Context)
if !ok {
return nil, errors.New(`k-stream.DLQ.Publish: published message context should be the type of kstream.Context`)
}
return &data.Record{
Key: key,
Value: value,
Partition: kContext.Meta(kCtx).Partition,
Topic: dq.topic(kContext.Meta(kCtx).Topic),
}, nil
}
func (dq *dlq) topic(topic string) string {
if dq.options.Type == DqlPerTopic {
return fmt.Sprintf(dq.options.TopicFormat, topic)
}
return dq.options.Topic
}
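// Topic resolution sketch (a hedged example; names are illustrative only):
// with Type: DqlPerTopic and TopicFormat: `%s_dlq`, a record consumed from
// topic `orders` is published to `orders_dlq`; with DqlGlobal every record
// goes to Options.Topic.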
<|start_filename|>kstream/processors/join/global_table_joiner.go<|end_filename|>
package join
import (
"context"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/kstream/kstream/topology"
)
type GlobalTableJoiner struct {
//Topic string
Id int32
Typ Type
Store string
KeyMapper KeyMapper
ValueMapper ValueMapper
store store.Store
Registry store.Registry
childBuilders []topology.NodeBuilder
childs []topology.Node
}
func (j *GlobalTableJoiner) ChildBuilders() []topology.NodeBuilder {
return j.childBuilders
}
func (j *GlobalTableJoiner) Childs() []topology.Node {
return j.childs
}
func (j *GlobalTableJoiner) AddChildBuilder(builder topology.NodeBuilder) {
j.childBuilders = append(j.childBuilders, builder)
}
func (j *GlobalTableJoiner) AddChild(node topology.Node) {
j.childs = append(j.childs, node)
}
func (j *GlobalTableJoiner) Next() bool {
return true
}
func (j *GlobalTableJoiner) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, next bool, err error) {
joined, err := j.Join(ctx, kIn, vIn)
if err != nil {
return
}
for _, child := range j.childs {
_, _, next, err := child.Run(ctx, kIn, joined)
if err != nil || !next {
return nil, nil, false, err
}
}
return kIn, joined, true, err
}
func (j *GlobalTableJoiner) Type() topology.Type {
return topology.TypeJoiner
}
func (j *GlobalTableJoiner) Build() (topology.Node, error) { //TODO: write new build
s, err := j.Registry.Store(j.Store)
if err != nil || s == nil {
return nil, errors.New(`store [` + j.Store + `] does not exist`)
}
j.store = s
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range j.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &GlobalTableJoiner{
Id: j.Id,
Typ: j.Typ,
Store: j.Store,
KeyMapper: j.KeyMapper,
ValueMapper: j.ValueMapper,
store: j.store,
Registry: j.Registry,
childs: childs,
}, nil
}
func (j *GlobalTableJoiner) Join(ctx context.Context, key interface{}, leftVal interface{}) (joinedVal interface{}, err error) {
// get key from key mapper
k, err := j.KeyMapper(key, leftVal)
if err != nil {
return nil, errors.WithPrevious(err, `KeyMapper error`)
}
// get value from store
rightValue, err := j.store.Get(ctx, k)
if err != nil {
return nil, errors.WithPrevious(err,
fmt.Sprintf(`cannot get value from [%s] store`, j.Store))
}
// for InnerJoin, abort the join when the right side lookup returns nil
if j.Typ == InnerJoin && rightValue == nil {
return nil, errors.New(
fmt.Sprintf(`right value lookup failed due to [key [%+v] does not exist in %s store]`, k, j.store.Name()))
}
// send the left and right values to the ValueMapper and get the joined value
valJoined, err := j.ValueMapper(leftVal, rightValue)
if err != nil {
return nil, errors.WithPrevious(err,
`value mapper failed`)
}
return valJoined, nil
}
func (j *GlobalTableJoiner) Name() string {
return j.Store
}
func (j *GlobalTableJoiner) ID() int32 {
return j.Id
}
<|start_filename|>kstream/processors/transformer.go<|end_filename|>
package processors
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/topology"
)
type TransFunc func(ctx context.Context, key, value interface{}) (kOut, vOut interface{}, err error)
type Transformer struct {
Id int32
TransFunc TransFunc
childBuilders []topology.NodeBuilder
childs []topology.Node
}
func (t *Transformer) Childs() []topology.Node {
return t.childs
}
func (t *Transformer) ChildBuilders() []topology.NodeBuilder {
return t.childBuilders
}
func (t *Transformer) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range t.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &Transformer{
TransFunc: t.TransFunc,
childs: childs,
Id: t.Id,
}, nil
}
func (t *Transformer) Next() bool {
return true
}
func (t *Transformer) ID() int32 {
return t.Id
}
func (t *Transformer) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, next bool, err error) {
k, v, err := t.TransFunc(ctx, kIn, vIn)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `transformer error`)
}
for _, child := range t.childs {
_, _, next, err := child.Run(ctx, k, v)
if err != nil || !next {
return nil, nil, false, err
}
}
return k, v, true, err
}
func (t *Transformer) Type() topology.Type {
return topology.Type(`transformer`)
}
func (t *Transformer) Name() string {
return `transformer`
}
func (t *Transformer) AddChildBuilder(builder topology.NodeBuilder) {
t.childBuilders = append(t.childBuilders, builder)
}
func (t *Transformer) AddChild(node topology.Node) {
t.childs = append(t.childs, node)
}
<|start_filename|>producer/producer_test.go<|end_filename|>
package producer
import (
"context"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/data"
"testing"
)
func setupMockTopics(t *testing.T, topics *admin.Topics) {
if err := topics.AddTopic(&admin.MockTopic{
Name: "",
Meta: &admin.Topic{
Name: "testing",
NumPartitions: 2,
},
}); err != nil {
t.Error(err)
}
}
func TestMockProducer_Produce(t *testing.T) {
topics := admin.NewMockTopics()
setupMockTopics(t, topics)
producer := NewMockProducer(topics)
msg := &data.Record{
Key: []byte(`100`),
Value: []byte(`100`),
Partition: 1,
}
p, o, err := producer.Produce(context.Background(), msg)
if err != nil {
t.Error(err)
}
if p != 0 || o != 0 {
t.Fail()
}
}
func TestMockProducer_ProduceBatch(t *testing.T) {
topics := admin.NewMockTopics()
setupMockTopics(t, topics)
producer := NewMockProducer(topics)
msg1 := &data.Record{
Key: []byte(`100`),
Value: []byte(`100`),
Partition: 1,
}
msg2 := *msg1
msg2.Key = []byte(`100`)
err := producer.ProduceBatch(context.Background(), []*data.Record{msg1, &msg2})
if err != nil {
t.Error(err)
}
}
<|start_filename|>kstream/processor.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package kstream
import (
"context"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/changelog"
kContext "github.com/tryfix/kstream/kstream/context"
"github.com/tryfix/kstream/kstream/topology"
"github.com/tryfix/kstream/kstream/worker_pool"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"github.com/tryfix/traceable-context"
"time"
)
type processor struct {
id string
topicPartition consumer.TopicPartition
topologyBuilder *topology.TopologyBuilder
changelogEnabled bool
changelog changelog.Changelog
changelogBuilder changelog.Builder
records <-chan *data.Record
context context.Context
cancel context.CancelFunc
changelogMarks chan *data.Record
taskPoolConfig *worker_pool.PoolConfig
taskPool *worker_pool.Pool
logger log.Logger
metricsReporter metrics.Reporter
metrics struct {
processedLatency metrics.Observer
}
}
func newProcessor(id string, tp consumer.TopicPartition, changelog changelog.Builder, logger log.Logger, metricsReporter metrics.Reporter) (*processor, error) {
ctx, cancelFunc := context.WithCancel(context.Background())
p := &processor{
id: id,
topicPartition: tp,
context: ctx,
cancel: cancelFunc,
changelogMarks: make(chan *data.Record),
logger: logger.NewLog(log.Prefixed(fmt.Sprintf(`processor-%s-%d`, tp.Topic, tp.Partition))),
changelogBuilder: changelog,
metricsReporter: metricsReporter,
}
p.metrics.processedLatency = metricsReporter.Observer(metrics.MetricConf{
Path: `k_stream_stream_processor_job_processed_latency_microseconds`,
Labels: []string{`topic`, `partition`},
})
return p, nil
}
func (p *processor) boot() error {
defer p.logger.Info(`processor booted`)
p.taskPool = worker_pool.NewPool(
p.topicPartition.String(),
p.topologyBuilder,
p.metricsReporter,
p.logger,
p.taskPoolConfig)
if p.changelogEnabled {
stateChangelog, err := p.changelogBuilder(p.id, p.topicPartition.Topic, p.topicPartition.Partition)
if err != nil {
return errors.WithPrevious(err, `cannot init changelog`)
}
p.changelog = stateChangelog
records, err := p.changelog.ReadAll(p.context)
if err != nil {
return errors.WithPrevious(err, `cannot recover`)
}
for _, record := range records {
ctx := p.createContext(record)
// these records are already marked in the changelog, so start execution immediately
p.execute(ctx, record.Timestamp, record)
}
}
return nil
}
func (p *processor) start() {
p.logger.Info("processor started")
for record := range p.records {
p.process(record)
}
// records chan is closed, stop the processor
p.Stop()
}
func (p *processor) process(record *data.Record) {
ctx := p.createContext(record)
// if the message processing method is sync, changelog marking is not necessary
begin := time.Now()
if p.taskPoolConfig.Order == worker_pool.OrderPreserved {
p.taskPool.Run(ctx, record.Key, record.Value, func() {
p.metrics.processedLatency.Observe(float64(time.Since(begin).Nanoseconds()/1e3), map[string]string{
`topic`: p.topicPartition.Topic,
`partition`: fmt.Sprint(p.topicPartition.Partition),
})
p.changelogMarks <- record
})
return
}
// first mark the record in the changelog
if p.changelogEnabled {
if err := p.changelog.Put(ctx, record); err != nil {
p.logger.ErrorContext(ctx, fmt.Sprintf(`cannot save to changelog - %+v`, err))
} else {
// send record to the marked chan
p.changelogMarks <- record
p.logger.TraceContext(ctx, "record mark on changelog")
}
}
p.execute(ctx, begin, record)
}
func (p *processor) execute(ctx context.Context, begin time.Time, record *data.Record) {
p.taskPool.Run(ctx, record.Key, record.Value, func() {
p.commit(ctx, begin, record)
})
}
func (p *processor) commit(ctx context.Context, begin time.Time, record *data.Record) {
// processing is done delete from changelog
if p.changelogEnabled {
if err := p.changelog.Delete(ctx, record); err != nil {
p.logger.ErrorContext(ctx, fmt.Sprintf(`cannot delete from changelog due to %+v`, err))
return
}
p.logger.TraceContext(ctx, `record deleted from changelog`)
}
p.metrics.processedLatency.Observe(float64(time.Since(begin).Nanoseconds()/1e3), map[string]string{
`topic`: p.topicPartition.Topic,
`partition`: fmt.Sprint(p.topicPartition.Partition),
})
}
func (p *processor) createContext(record *data.Record) context.Context {
return kContext.FromRecord(traceable_context.WithUUID(record.UUID), record)
}
func (p *processor) Stop() {
p.logger.Info(`processor stopping...`)
//p.taskPool.Stop()
defer p.logger.Info(`processor stopped`)
p.cancel()
}
<|start_filename|>consumer/partition.go<|end_filename|>
package consumer
import (
"github.com/Shopify/sarama"
"github.com/tryfix/kstream/data"
)
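// Partition represents an allocated topic partition. MarkOffset and
// CommitOffset mark offset+1 as consumed on the underlying group session.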
type Partition interface {
Records() <-chan *data.Record
Partition() TopicPartition
MarkOffset(offset int64)
CommitOffset(*data.Record) error
}
type partition struct {
records chan *data.Record
groupSession sarama.ConsumerGroupSession
partition TopicPartition
}
func newPartition(tp TopicPartition) *partition {
return &partition{
records: make(chan *data.Record, 1),
partition: tp,
}
}
func (p *partition) Records() <-chan *data.Record {
return p.records
}
func (p *partition) Partition() TopicPartition {
return p.partition
}
func (p *partition) MarkOffset(offset int64) {
p.groupSession.MarkOffset(p.partition.Topic, p.partition.Partition, offset+1, ``)
}
func (p *partition) CommitOffset(r *data.Record) error {
p.groupSession.MarkOffset(r.Topic, r.Partition, r.Offset+1, ``)
return nil
}
func (p *partition) close() {
close(p.records)
}
<|start_filename|>backend/memory/partitioner.go<|end_filename|>
package memory
//import "github.com/coreos/etcd/mvcc/backend"
//
//type Partitioner interface {
// Partition(key interface{}) backend.Backend
//}
//
//
//
//func (p *Partitioner) Partition() backend.Backend {
//
//}
<|start_filename|>examples/example_2/events/a.go<|end_filename|>
package events
import "encoding/json"
type AA struct {
ID string `json:"id"`
Type string `json:"type"`
AAA string `json:"aaa"`
Timestamp int64 `json:"timestamp"`
}
func (a AA) Encode(data interface{}) ([]byte, error) {
b, err := json.Marshal(data)
if err != nil {
return nil, err
}
return b, nil
}
func (a AA) Decode(data []byte) (interface{}, error) {
ac := AA{}
err := json.Unmarshal(data, &ac)
if err != nil {
return nil, err
}
return ac, nil
}
<|start_filename|>kstream/encoding/int_encoder.go<|end_filename|>
package encoding
import (
"github.com/tryfix/errors"
"reflect"
"strconv"
)
type IntEncoder struct{}
func (IntEncoder) Encode(v interface{}) ([]byte, error) {
i, ok := v.(int)
if !ok {
return nil, errors.Errorf(`invalid type [%v] expected int`, reflect.TypeOf(v))
}
return []byte(strconv.Itoa(i)), nil
}
func (IntEncoder) Decode(data []byte) (interface{}, error) {
i, err := strconv.Atoi(string(data))
if err != nil {
return nil, errors.WithPrevious(err, `cannot decode data`)
}
return i, nil
}
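// Round-trip sketch (a hedged example, not part of the original source):
//
//  byt, _ := IntEncoder{}.Encode(100) // []byte(`100`)
//  v, _ := IntEncoder{}.Decode(byt)   // interface{} holding int 100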
<|start_filename|>kstream/k_sink.go<|end_filename|>
package kstream
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/data"
kContext "github.com/tryfix/kstream/kstream/context"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/kstream/kstream/topology"
"github.com/tryfix/kstream/producer"
"time"
)
type SinkRecord struct {
Key, Value interface{}
Timestamp time.Time // only set if kafka is version 0.10+, inner message timestamp
Headers data.RecordHeaders // only set if kafka is version 0.11+
}
type KSink struct {
Id int32
KeyEncoder encoding.Encoder
ValEncoder encoding.Encoder
Producer producer.Producer
ProducerBuilder producer.Builder
name string
TopicPrefix string
topic topic
Repartitioned bool
info map[string]string
KeyEncoderBuilder encoding.Builder
ValEncoderBuilder encoding.Builder
recordTransformer func(ctx context.Context, in SinkRecord) (out SinkRecord, err error)
recordHeaderExtractor func(ctx context.Context, in SinkRecord) (data.RecordHeaders, error)
tombstoneFiler func(ctx context.Context, in SinkRecord) (tombstone bool)
}
func (s *KSink) Childs() []topology.Node {
return []topology.Node{}
}
func (s *KSink) ChildBuilders() []topology.NodeBuilder {
return []topology.NodeBuilder{}
}
func (s *KSink) Build() (topology.Node, error) {
p, err := s.ProducerBuilder(&producer.Config{
//id: producer.NewProducerId(s.topic(s.topic(s.TopicPrefix))),
})
if err != nil {
return nil, errors.WithPrevious(err, `producer build failed`)
}
s.Producer = p
s.KeyEncoder = s.KeyEncoderBuilder()
s.ValEncoder = s.ValEncoderBuilder()
return s, nil
}
func (s *KSink) AddChildBuilder(builder topology.NodeBuilder) {
panic("implement me")
}
func (s *KSink) AddChild(node topology.Node) {
panic("implement me")
}
//type kSinkBuilder struct {
// keyEncoderBuilder encoding.Builder
// valEncoderBuilder encoding.Builder
// producerBuilder producer.Builder
// name string
// info map[string]string
// topic string
//}
//func (b *kSinkBuilder) AddChildBuilder(builder node.NodeBuilder) {
// panic("implement me")
//}
//
//func (b *kSinkBuilder) Build() (node.Node, error) {
//
// p, err := b.producerBuilder(&producer.Options{
// id: producer.NewProducerId(b.topic),
// })
// if err != nil {
// return nil, errors.WithPrevious(err, `cannot Build producer`)
// }
//
// return &kSink{
// keyEncoder: b.keyEncoderBuilder(),
// valEncoder: b.valEncoderBuilder(),
// producer: p,
// name: b.name,
// topic: b.topic,
// }, nil
//}
type SinkOption func(sink *KSink)
func (s *KSink) applyOptions(options ...SinkOption) {
for _, option := range options {
option(s)
}
}
func (s *KSink) Name() string {
return `sink_` + s.topic(s.TopicPrefix)
}
func (*KSink) Next() bool {
return false
}
func (s *KSink) SinkType() string {
return `kafka`
}
func (*KSink) Type() topology.Type {
return topology.TypeSink
}
func (s *KSink) Info() map[string]string {
return map[string]string{
`topic`: s.topic(s.TopicPrefix),
}
}
// Deprecated: Please use SinkWithProducer instead
func WithProducer(p producer.Builder) SinkOption {
return func(sink *KSink) {
sink.ProducerBuilder = p
}
}
func SinkWithProducer(p producer.Builder) SinkOption {
return func(sink *KSink) {
sink.ProducerBuilder = p
}
}
// Deprecated: Please use SinkWithRecordHeaderExtractor instead
func WithCustomRecord(f func(ctx context.Context, in SinkRecord) (out SinkRecord, err error)) SinkOption {
return func(sink *KSink) {
sink.recordTransformer = f
}
}
func SinkWithRecordHeaderExtractor(f func(ctx context.Context, in SinkRecord) (headers data.RecordHeaders, err error)) SinkOption {
return func(sink *KSink) {
sink.recordHeaderExtractor = f
}
}
func SinkWithTombstoneFilter(f func(ctx context.Context, in SinkRecord) (tombstone bool)) SinkOption {
return func(sink *KSink) {
sink.tombstoneFiler = f
}
}
func withPrefixTopic(topic topic) SinkOption {
return func(sink *KSink) {
sink.topic = topic
}
}
func NewKSinkBuilder(name string, id int32, topic topic, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...SinkOption) *KSink {
builder := &KSink{
ValEncoderBuilder: valEncoder,
KeyEncoderBuilder: keyEncoder,
topic: topic,
name: name,
Id: id,
tombstoneFiler: func(ctx context.Context, in SinkRecord) (tombstone bool) {
return false
},
recordHeaderExtractor: func(ctx context.Context, in SinkRecord) (out data.RecordHeaders, err error) {
return in.Headers, nil
},
recordTransformer: func(ctx context.Context, in SinkRecord) (out SinkRecord, err error) {
return in, nil
},
}
builder.applyOptions(options...)
return builder
}
func (s *KSink) Close() error {
return nil
}
func (s *KSink) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, next bool, err error) {
record := new(data.Record)
record.Timestamp = time.Now()
record.Topic = s.topic(s.TopicPrefix)
sinkRecord := SinkRecord{
Key: kIn,
Value: vIn,
Timestamp: record.Timestamp,
Headers: kContext.Meta(ctx).Headers.All(),
}
// Deprecated: apply custom record transformations
customRecord, err := s.recordTransformer(ctx, sinkRecord)
if err != nil {
return nil, nil, false, err
}
sinkRecord.Key = customRecord.Key
sinkRecord.Value = customRecord.Value
sinkRecord.Headers = customRecord.Headers
sinkRecord.Timestamp = customRecord.Timestamp
// apply data record headers
headers, err := s.recordHeaderExtractor(ctx, sinkRecord)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `record extract failed`)
}
sinkRecord.Headers = headers
keyByt, err := s.KeyEncoder.Encode(sinkRecord.Key)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `sink key encode error`)
}
record.Key = keyByt
// produce a null (tombstone) value when the key is nil or the record is
// marked as a tombstone by the tombstoneFiler
tombstoned := s.tombstoneFiler(ctx, sinkRecord)
if sinkRecord.Key == nil || tombstoned {
record.Value = nil
} else {
valByt, err := s.ValEncoder.Encode(sinkRecord.Value)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `sink value encode error`)
}
record.Value = valByt
}
record.Headers = sinkRecord.Headers
if _, _, err := s.Producer.Produce(ctx, record); err != nil {
return nil, nil, false, err
}
return nil, nil, true, nil
}
func (s *KSink) ID() int32 {
return s.Id
}
<|start_filename|>kstream/processors/join/global_table_joiner_test.go<|end_filename|>
package join
import (
"context"
"encoding/json"
"fmt"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/kstream/kstream/store"
"reflect"
"testing"
)
type rightRecord struct {
PrimaryKey int `json:"primary_key"`
ForeignKey int `json:"foreign_key"`
}
type leftRecord struct {
PrimaryKey int `json:"primary_key"`
ForeignKey int `json:"foreign_key"`
}
type joinedRecord struct {
left leftRecord
right rightRecord
}
func (e rightRecord) Decode(data []byte) (interface{}, error) {
v := rightRecord{}
if err := json.Unmarshal(data, &v); err != nil {
return nil, err
}
return v, nil
}
func (rightRecord) Encode(data interface{}) ([]byte, error) {
return json.Marshal(data)
}
var testStore = store.NewMockStore(
`test_store`,
encoding.IntEncoder{},
rightRecord{},
backend.NewMockBackend(`test_backend`, 0))
func makeJoiner(typ Type) *GlobalTableJoiner {
return &GlobalTableJoiner{
store: testStore,
KeyMapper: func(key interface{}, value interface{}) (mappedKey interface{}, err error) {
v, _ := value.(leftRecord)
return v.ForeignKey, nil
},
ValueMapper: func(left interface{}, right interface{}) (joined interface{}, err error) {
l, _ := left.(leftRecord)
r, _ := right.(rightRecord)
return joinedRecord{
left: l,
right: r,
}, nil
},
Typ: typ,
}
}
func TestGlobalTableJoiner_Join_Inner(t *testing.T) {
leftRecord := leftRecord{
PrimaryKey: 1000,
ForeignKey: 2000,
}
rightRecord := rightRecord{
PrimaryKey: 1000,
ForeignKey: 2000,
}
err := testStore.Set(context.Background(), 2000, rightRecord, 0)
if err != nil {
t.Error(err)
}
defer testStore.Delete(context.Background(), 2000)
joiner := makeJoiner(InnerJoin)
v, err := joiner.Join(context.Background(), 1000, leftRecord)
if err != nil {
t.Error(err)
}
if _, ok := v.(joinedRecord); !ok {
t.Error(`invalid record`)
}
}
func TestGlobalTableJoiner_Join_Inner_Should_Return_Error_When_Right_Null(t *testing.T) {
leftRecord := leftRecord{
PrimaryKey: 1000,
ForeignKey: 2000,
}
joiner := makeJoiner(InnerJoin)
v, err := joiner.Join(context.Background(), 1000, leftRecord)
if err == nil {
t.Error(`expected an error when the right value lookup fails`)
}
if v != nil {
t.Error(`joined value must be null when the right lookup fails`)
}
}
func TestGlobalTableJoiner_Join_Inner_Should_Return_Error_When_Left_Null(t *testing.T) {
rightRecord := rightRecord{
PrimaryKey: 1000,
ForeignKey: 2000,
}
err := testStore.Set(context.Background(), 2000, rightRecord, 0)
if err != nil {
t.Error(err)
}
defer testStore.Delete(context.Background(), 2000)
joiner := makeJoiner(InnerJoin)
v, err := joiner.Join(context.Background(), 1000, nil)
if err == nil {
t.Error(`expected an error when the left value is nil`)
}
if v != nil {
t.Error(`joined value must be null when the left value is nil`)
}
}
func TestGlobalTableJoiner_Join_Left(t *testing.T) {
leftRecord := leftRecord{
PrimaryKey: 1000,
ForeignKey: 2000,
}
joiner := makeJoiner(LeftJoin)
v, err := joiner.Join(context.Background(), 1000, leftRecord)
if err != nil {
t.Error(err)
return
}
if _, ok := v.(joinedRecord); !ok {
t.Error(fmt.Sprintf(`want [joinedRecord] have [%+v]`, reflect.TypeOf(v)))
}
}
<|start_filename|>kstream/store/registry.go<|end_filename|>
package store
import (
"errors"
"fmt"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"sync"
)
type Registry interface {
Register(store Store)
New(name string, keyEncoder, valEncoder encoding.Builder, options ...Options) Store
NewIndexedStore(name string, keyEncoder, valEncoder encoding.Builder, indexes []Index, options ...Options) IndexedStore
Store(name string) (Store, error)
Index(name string) (Index, error)
Stores() []Store
Indexes() []Index
}
type registry struct {
stores map[string]Store
stateStores map[string]StateStore
indexes map[string]Index
mu *sync.Mutex
logger log.Logger
applicationId string
storeBuilder Builder
indexedStoreBuilder IndexedStoreBuilder
stateStoreBuilder StateStoreBuilder
}
type RegistryConfig struct {
Host string
HttpEnabled bool
applicationId string
StoreBuilder Builder
StateStoreBuilder StateStoreBuilder
IndexedStoreBuilder IndexedStoreBuilder
Logger log.Logger
MetricsReporter metrics.Reporter
}
func NewRegistry(config *RegistryConfig) Registry {
reg := &registry{
stores: make(map[string]Store),
stateStores: make(map[string]StateStore),
indexes: make(map[string]Index),
mu: &sync.Mutex{},
logger: config.Logger.NewLog(log.Prefixed(`store-registry`)),
applicationId: config.applicationId,
stateStoreBuilder: config.StateStoreBuilder,
indexedStoreBuilder: config.IndexedStoreBuilder,
storeBuilder: config.StoreBuilder,
}
if config.HttpEnabled {
MakeEndpoints(config.Host, reg, reg.logger.NewLog(log.Prefixed(`http`)))
}
return reg
}
func (r *registry) Register(store Store) {
name := store.Name()
if _, ok := r.stores[name]; ok {
r.logger.Fatal(fmt.Sprintf(`store [%s] already exist`, name))
}
// if store is an IndexedStore store register Indexes
if stor, ok := store.(IndexedStore); ok {
for _, idx := range stor.Indexes() {
r.indexes[idx.String()] = idx
}
}
r.stores[name] = store
}
func (r *registry) New(name string, keyEncoder encoding.Builder, valEncoder encoding.Builder, options ...Options) Store {
if _, ok := r.stores[name]; ok {
r.logger.Fatal(fmt.Sprintf(`store [%s] already exist`, name))
}
s, err := r.storeBuilder(name, keyEncoder, valEncoder, options...)
if err != nil {
r.logger.Fatal(err)
}
r.stores[name] = s
return r.stores[name]
}
func (r *registry) NewIndexedStore(name string, keyEncoder, valEncoder encoding.Builder, indexes []Index, options ...Options) IndexedStore {
if _, ok := r.stores[name]; ok {
r.logger.Fatal(fmt.Sprintf(`store [%s] already exist`, name))
}
s, err := r.indexedStoreBuilder(name, keyEncoder, valEncoder, indexes, options...)
if err != nil {
r.logger.Fatal(err)
}
r.stores[name] = s
for _, idx := range s.Indexes() {
r.indexes[idx.String()] = idx
}
return s
}
func (r *registry) Store(name string) (Store, error) {
r.mu.Lock()
defer r.mu.Unlock()
store, ok := r.stores[name]
if !ok {
return nil, errors.New(fmt.Sprintf(`unknown store [%s]`, name))
}
return store, nil
}
func (r *registry) Index(name string) (Index, error) {
r.mu.Lock()
defer r.mu.Unlock()
idx, ok := r.indexes[name]
if !ok {
return nil, errors.New(fmt.Sprintf(`unknown index [%s]`, name))
}
return idx, nil
}
func (r *registry) Stores() []Store {
var list []Store
for _, stor := range r.stores {
list = append(list, stor)
}
return list
}
func (r *registry) Indexes() []Index {
var list []Index
for _, idx := range r.indexes {
list = append(list, idx)
}
return list
}
<|start_filename|>examples/example_2/stream/mock-stream/mock_stream.go<|end_filename|>
package main
import (
"context"
"fmt"
"github.com/google/uuid"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/examples/example_2/domain"
"github.com/tryfix/kstream/examples/example_2/encoders"
"github.com/tryfix/kstream/examples/example_2/events"
"github.com/tryfix/kstream/examples/example_2/stream"
"github.com/tryfix/kstream/kstream"
"github.com/tryfix/kstream/kstream/offsets"
"github.com/tryfix/kstream/kstream/worker_pool"
"github.com/tryfix/kstream/producer"
"github.com/tryfix/log"
"os"
"os/signal"
"time"
)
func setupMockBuilders() *kstream.StreamBuilder {
config := kstream.NewStreamBuilderConfig()
topics := admin.NewMockTopics()
kafkaAdmin := &admin.MockKafkaAdmin{
Topics: topics,
}
if err := kafkaAdmin.CreateTopics(map[string]*admin.Topic{
domain.ABCTopic: {
Name: domain.ABCTopic,
NumPartitions: 2,
ReplicationFactor: 1,
},
}); err != nil {
log.Fatal(err)
}
prod := producer.NewMockProducer(topics)
offsetManager := &offsets.MockManager{Topics: topics}
go produceAAndB(prod)
//go produceAccountDebited(prod)
//go consumeMessageAndPrint(topics)
config.BootstrapServers = []string{`localhost:9092`}
config.ApplicationId = `k_stream_example_1`
config.ConsumerCount = 1
config.Host = `localhost:8100`
config.AsyncProcessing = true
//config.Store.StorageDir = `storage`
config.Store.Http.Enabled = true
config.Store.Http.Host = `:9002`
config.ChangeLog.Enabled = false
//config.ChangeLog.Buffer.Enabled = true
//config.ChangeLog.Buffer.Size = 100
//config.ChangeLog.ReplicationFactor = 3
//config.ChangeLog.MinInSycReplicas = 2
config.WorkerPool.Order = worker_pool.OrderByKey
config.WorkerPool.NumOfWorkers = 100
config.WorkerPool.WorkerBufferSize = 10
config.Logger = log.NewLog(
log.WithLevel(`INFO`),
log.WithColors(true),
).Log()
return kstream.NewStreamBuilder(config,
kstream.WithPartitionConsumerBuilder(consumer.NewMockPartitionConsumerBuilder(topics, offsetManager)),
kstream.WithConsumerBuilder(consumer.NewMockConsumerBuilder(topics)),
kstream.WithOffsetManager(offsetManager),
kstream.WithKafkaAdmin(kafkaAdmin),
kstream.WithProducerBuilder(func(configs *producer.Config) (i producer.Producer, e error) {
return prod, nil
}),
)
}
func main() {
builder := setupMockBuilders()
//mockBackend := backend.NewMockBackend(`mock_backend`, time.Duration(time.Second * 3600))
//accountDetailMockStore := store.NewMockStore(`account_detail_store`, encoders.KeyEncoder(), encoders.AccountDetailsUpdatedEncoder(), mockBackend)
//builder.StoreRegistry().Register(accountDetailMockStore)
//
//customerProfileMockStore := store.NewMockStore(`customer_profile_store`, encoders.KeyEncoder(), encoders.CustomerProfileUpdatedEncoder(), mockBackend)
//builder.StoreRegistry().Register(customerProfileMockStore)
//builder.StoreRegistry().New(
// `account_detail_store`,
// encoders.KeyEncoder,
// encoders.AccountDetailsUpdatedEncoder)
//
//builder.StoreRegistry().New(
// `customer_profile_store`,
// encoders.KeyEncoder,
// encoders.CustomerProfileUpdatedEncoder)
err := builder.Build(stream.InitStreams(builder)...)
if err != nil {
log.Fatal(`mock build failed `, err)
}
synced := make(chan bool, 1)
// trap SIGINT to trigger a shutdown.
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
streams := kstream.NewStreams(builder, kstream.NotifyOnStart(synced))
go func() {
<-signals
streams.Stop()
}()
if err := streams.Start(); err != nil {
log.Fatal(log.WithPrefix(`boot.boot.Init`, `error while starting streams`), err)
}
//produceRealData()
}
//func produceAccountCredited(streamProducer producer.Producer) {
//
// for {
// key := rand.Int63n(100)
// event := events.AccountCredited{
// ID: uuid.New().String(),
// Type: `account_credited`,
// Timestamp: time.Now().UnixNano() / 1e6,
// }
// event.Body.AccountNo = key
// event.Body.TransactionId = rand.Int63n(10000)
// event.Body.Amount = 1000.00
// event.Body.Reason = `utility bill transfer`
// event.Body.DebitedFrom = 1111
// event.Body.CreditedAt = time.Now().UnixNano() / 1e6
// event.Body.Location = `Main Branch, City A`
//
// encodedKey, err := encoders.KeyEncoder().Encode(key)
// if err != nil {
// log.Error(err, event)
// }
// encodedVal, err := encoders.AccountCreditedEncoder().Encode(event)
// if err != nil {
// log.Error(err, event)
// }
//
// _, _, err = streamProducer.Produce(context.Background(), &data.Record{
// Key: encodedKey,
// Value: encodedVal,
// Topic: `transaction`,
// Timestamp: time.Now(),
// })
//
// if err != nil {
// log.Error(err)
// }
//
// time.Sleep(time.Millisecond * 500)
// }
//
//}
func produceAAndB(streamProducer producer.Producer) {
for {
key := uuid.New().String()
produceA(streamProducer, key)
time.Sleep(time.Millisecond * 100)
produceB(streamProducer, key)
time.Sleep(time.Millisecond * 100)
produceC(streamProducer, key)
time.Sleep(time.Millisecond * 500)
key = uuid.New().String()
produceB(streamProducer, key)
time.Sleep(time.Millisecond * 100)
produceC(streamProducer, key)
time.Sleep(time.Millisecond * 100)
produceA(streamProducer, key)
time.Sleep(time.Millisecond * 500)
}
}
func produceA(streamProducer producer.Producer, key string) {
event := events.AA{
ID: uuid.New().String(),
Type: `aa`,
AAA: fmt.Sprintf(`aaa with key : %v`, key),
Timestamp: time.Now().UnixNano() / 1e6,
}
encodedKey, err := encoders.StringEncoder().Encode(key)
if err != nil {
log.Error(err, event)
}
encodedVal, err := encoders.AAEncoder().Encode(event)
if err != nil {
log.Error(err, event)
}
_, _, err = streamProducer.Produce(context.Background(), &data.Record{
Key: encodedKey,
Value: encodedVal,
Topic: domain.ABCTopic,
Timestamp: time.Now(),
})
if err != nil {
log.Error(err)
}
}
func produceB(streamProducer producer.Producer, key string) {
event := events.BB{
ID: uuid.New().String(),
Type: `bb`,
BBB: fmt.Sprintf(`bbb with key : %v`, key),
Timestamp: time.Now().UnixNano() / 1e6,
}
encodedKey, err := encoders.StringEncoder().Encode(key)
if err != nil {
log.Error(err, event)
}
encodedVal, err := encoders.BBEncoder().Encode(event)
if err != nil {
log.Error(err, event)
}
_, _, err = streamProducer.Produce(context.Background(), &data.Record{
Key: encodedKey,
Value: encodedVal,
Topic: domain.ABCTopic,
Timestamp: time.Now(),
})
if err != nil {
log.Error(err)
}
}
func produceC(streamProducer producer.Producer, key string) {
event := events.CC{
ID: uuid.New().String(),
Type: `cc`,
CCC: fmt.Sprintf(`ccc with key : %v`, key),
Timestamp: time.Now().UnixNano() / 1e6,
}
encodedKey, err := encoders.StringEncoder().Encode(key)
if err != nil {
log.Error(err, event)
}
encodedVal, err := encoders.CCEncoder().Encode(event)
if err != nil {
log.Error(err, event)
}
_, _, err = streamProducer.Produce(context.Background(), &data.Record{
Key: encodedKey,
Value: encodedVal,
Topic: domain.ABCTopic,
Timestamp: time.Now(),
})
if err != nil {
log.Error(err)
}
}
//func produceAccountDetails(streamProducer producer.Producer) {
// for i := 1; i <= 100; i++ {
// key := int64(i)
// event := events.AccountDetailsUpdated{
// ID: uuid.New().String(),
// Type: `account_details_updated`,
// Timestamp: time.Now().UnixNano() / 1e6,
// }
// event.Body.AccountNo = key
// event.Body.AccountType = `Saving`
// event.Body.CustomerID = rand.Int63n(100)
// event.Body.Branch = `Main Branch, City A`
// event.Body.BranchCode = 1
// event.Body.UpdatedAt = time.Now().Unix()
//
// encodedKey, err := encoders.KeyEncoder().Encode(key)
// if err != nil {
// log.Error(err, event)
// }
// encodedVal, err := encoders.AccountDetailsUpdatedEncoder().Encode(event)
// if err != nil {
// log.Error(err, event)
// }
//
// _, _, err = streamProducer.Produce(context.Background(), &data.Record{
// Key: encodedKey,
// Value: encodedVal,
// Topic: `account_detail`,
// Timestamp: time.Now(),
// })
//
// if err != nil {
// log.Error(err)
// }
//
// time.Sleep(time.Millisecond * 5)
// }
//}
//
//func produceCustomerProfile(streamProducer producer.Producer) {
//
// for i := 1; i <= 100; i++ {
// key := int64(i)
// event := events.CustomerProfileUpdated{
// ID: uuid.New().String(),
// Type: `customer_profile_updated`,
// Timestamp: time.Now().UnixNano() / 1e6,
// }
// event.Body.CustomerID = key
// event.Body.CustomerName = `<NAME>`
// event.Body.NIC = `222222222v`
// event.Body.ContactDetails.Email = `<EMAIL>`
// event.Body.ContactDetails.Phone = `911`
// event.Body.ContactDetails.Address = `No 1, Lane 1, City A.`
// event.Body.DateOfBirth = `16th-Nov-2019`
// event.Body.UpdatedAt = time.Now().Unix()
//
// encodedKey, err := encoders.KeyEncoder().Encode(key)
// if err != nil {
// log.Error(err, event)
// }
// encodedVal, err := encoders.CustomerProfileUpdatedEncoder().Encode(event)
// if err != nil {
// log.Error(err, event)
// }
//
// _, _, err = streamProducer.Produce(context.Background(), &data.Record{
// Key: encodedKey,
// Value: encodedVal,
// Topic: `customer_profile`,
// Timestamp: time.Now(),
// })
//
// if err != nil {
// log.Error(err)
// }
//
// time.Sleep(time.Millisecond * 5)
// }
//}
//
//func consumeMessageAndPrint(topics *admin.Topics) {
// mockConsumer := consumer.NewMockConsumer(topics)
// partitions, err := mockConsumer.Consume([]string{`message`}, rebalanceHandler{})
// if err != nil {
// log.Fatal(`consumer error `, err)
// }
//
// for p := range partitions {
// go func(pt consumer.Partition) {
// for record := range pt.Records() {
// log.Debug(fmt.Sprintf(`message was received to partition %v with offset %v `, record.Partition, record.Offset))
// m, err := encoders.MessageEncoder().Decode(record.Value)
// if err != nil {
// log.Error(err)
// }
//
// message, _ := m.(events.MessageCreated)
// fmt.Println(fmt.Sprintf(`received text message := %s`, message.Body.Text))
// log.Info(fmt.Sprintf(`received text message := %s`, message.Body.Text))
// }
// }(p)
// }
//}
type rebalanceHandler struct {
}
func (r rebalanceHandler) OnPartitionRevoked(ctx context.Context, revoked []consumer.TopicPartition) error {
return nil
}
func (r rebalanceHandler) OnPartitionAssigned(ctx context.Context, assigned []consumer.TopicPartition) error {
return nil
}
func produceRealData() {
config := producer.NewConfig()
config.Logger = log.NewLog(
log.WithLevel(`INFO`),
log.WithColors(true),
).Log()
config.BootstrapServers = []string{`localhost:9092`}
//pro, err := producer.NewProducer(config)
//if err != nil {
// log.Fatal(err)
//}
//produceAccountDetails(pro)
//produceCustomerProfile(pro)
//go produceAccountCredited(pro)
//produceAccountDebited(pro)
}
<|start_filename|>admin/kafka_admin_test.go<|end_filename|>
package admin
import (
"github.com/Shopify/sarama"
"github.com/tryfix/log"
"testing"
)
func TestKafkaAdmin_FetchInfo(t *testing.T) {
seedBroker := sarama.NewMockBroker(t, 1)
defer seedBroker.Close()
seedBroker.SetHandlerByMap(map[string]sarama.MockResponse{
"MetadataRequest": sarama.NewMockMetadataResponse(t).
SetController(seedBroker.BrokerID()).
SetLeader("my_topic", 0, seedBroker.BrokerID()).
SetBroker(seedBroker.Addr(), seedBroker.BrokerID()),
"DescribeConfigsRequest": sarama.NewMockDescribeConfigsResponse(t),
})
config := sarama.NewConfig()
config.Version = sarama.V1_0_0_0
saramaAdmin, err := sarama.NewClusterAdmin([]string{seedBroker.Addr()}, config)
if err != nil {
t.Fatal(err)
}
topic := `my_topic`
admin := &kafkaAdmin{
admin: saramaAdmin,
logger: log.NewNoopLogger(),
}
tps, err := admin.FetchInfo([]string{topic})
if err != nil {
t.Error(err)
}
if tps[topic].NumPartitions != 1 {
t.Fail()
}
err = saramaAdmin.Close()
if err != nil {
t.Fatal(err)
}
}
func TestKafkaAdmin_CreateTopics(t *testing.T) {
seedBroker := sarama.NewMockBroker(t, 1)
defer seedBroker.Close()
seedBroker.SetHandlerByMap(map[string]sarama.MockResponse{
"MetadataRequest": sarama.NewMockMetadataResponse(t).
SetController(seedBroker.BrokerID()).
SetBroker(seedBroker.Addr(), seedBroker.BrokerID()),
"CreateTopicsRequest": sarama.NewMockCreateTopicsResponse(t),
})
config := sarama.NewConfig()
config.Version = sarama.V0_10_2_0
saramaAdmin, err := sarama.NewClusterAdmin([]string{seedBroker.Addr()}, config)
if err != nil {
t.Fatal(err)
}
topic := `my_topic`
admin := &kafkaAdmin{
admin: saramaAdmin,
logger: log.NewNoopLogger(),
}
err = admin.CreateTopics(map[string]*Topic{
topic: {
Name: topic,
NumPartitions: 1,
ReplicationFactor: 1,
},
})
if err != nil {
t.Fatal(err)
}
err = saramaAdmin.Close()
if err != nil {
t.Fatal(err)
}
}
func TestKafkaAdmin_DeleteTopics(t *testing.T) {
seedBroker := sarama.NewMockBroker(t, 1)
defer seedBroker.Close()
seedBroker.SetHandlerByMap(map[string]sarama.MockResponse{
"MetadataRequest": sarama.NewMockMetadataResponse(t).
SetController(seedBroker.BrokerID()).
SetBroker(seedBroker.Addr(), seedBroker.BrokerID()),
"DeleteTopicsRequest": sarama.NewMockDeleteTopicsResponse(t),
})
config := sarama.NewConfig()
config.Version = sarama.V0_10_2_0
saramaAdmin, err := sarama.NewClusterAdmin([]string{seedBroker.Addr()}, config)
if err != nil {
t.Fatal(err)
}
topic := `my_topic`
admin := &kafkaAdmin{
admin: saramaAdmin,
logger: log.NewNoopLogger(),
}
_, err = admin.DeleteTopics([]string{topic})
if err != nil {
t.Fatal(err)
}
err = saramaAdmin.Close()
if err != nil {
t.Fatal(err)
}
}
<|start_filename|>examples/example_2/stream/abcCommonStream.go<|end_filename|>
package stream
import (
"github.com/tryfix/kstream/examples/example_2/domain"
"github.com/tryfix/kstream/examples/example_2/encoders"
"github.com/tryfix/kstream/kstream"
)
func initCommonStream(builder *kstream.StreamBuilder) kstream.Stream {
str := builder.Stream(
domain.ABCTopic,
encoders.StringEncoder,
encoders.CommonABEncoder,
kstream.WithConfig(map[string]interface{}{
//`stream.processor.retry`: 2,
//`stream.processor.retry.interval`: 3000,
//`stream.processor.changelog`: false,
//`stream.processor.changelog.minInSyncReplicas`: 2,
//`stream.processor.changelog.replicationFactor`: 3,
//`stream.processor.changelog.buffered`: true,
//`stream.processor.changelog.BufferedSize`: 100,
}))
AStream{
Upstream: str,
}.Init()
return str
}
<|start_filename|>kstream/offsets/resetter.go<|end_filename|>
package offsets
//import (
// "github.com/Shopify/sarama"
// "github.com/tryfix/kstream/consumer"
//)
//
//type Resetter struct {
// client sarama.Client
//}
//
//func (r *Resetter) Reset([]consumer.TopicPartition) {
// //b, err := r.client.Config()
//
//}
<|start_filename|>kstream/changelog/buffer_test.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package changelog
//import (
// "github.com/tryfix/kstream/consumer"
// "github.com/tryfix/kstream/data"
// "github.com/tryfix/kstream/producer"
// "testing"
// "time"
//)
//
//func TestNewBuffer(t *testing.T) {
// b := NewBuffer(producer.NewMockProducer(t), 10, 10*time.Second)
// if b.records == nil {
// t.Fail()
// }
//
// if b.mu == nil {
// t.Fail()
// }
//}
//
//func TestBufferStore(t *testing.T) {
// b := NewBuffer(producer.NewMockProducer(t), 10, 10*time.Second)
//
// rec := new(data.Record)
// rec.Key = []byte(`key`)
// b.Store(rec)
//
// if string(b.records[0].Key) != string(rec.Key) {
// t.Fail()
// }
//}
//
//func TestBufferClear(t *testing.T) {
// b := NewBuffer(producer.NewMockProducer(t), 10, 10*time.Second)
//
// rec := new(data.Record)
// rec.Key = []byte(``)
// rec.Value = []byte(``)
// b.Store(rec)
//
// b.Clear()
//
// if len(b.records) > 0 {
// t.Fail()
// }
//}
//
//func TestBufferShouldClearOnceFull(t *testing.T) {
// size := 5
//
// b := NewBuffer(producer.NewMockProducer(t), size, 10*time.Millisecond)
// go b.runFlusher()
//
// time.Sleep(1 * time.Second)
//
// rec := new(data.Record)
// for i := 0; i < size*20+1; i++ {
// b.Store(rec)
// }
//
// if len(b.records) != size {
// t.Fail()
// }
//}
//
//func TestBufferFlushInterval(t *testing.T) {
// d := 100 * time.Millisecond
// b := NewBuffer(producer.NewMockProducer(t), 10, d)
// go b.runFlusher()
//
// time.Sleep(d)
//
// rec := new(data.Record)
// rec.Key = []byte(`100`)
// rec.Value = []byte(`200`)
// b.Store(rec)
//
// time.Sleep(d + 1*time.Second)
//
// if len(b.records) > 0 {
// t.Fail()
// }
//}
<|start_filename|>examples/example_2/events/c.go<|end_filename|>
package events
import "encoding/json"
type CC struct {
ID string `json:"id"`
Type string `json:"type"`
CCC string `json:"ccc"`
Timestamp int64 `json:"timestamp"`
}
func (ad CC) Encode(data interface{}) ([]byte, error) {
b, err := json.Marshal(data)
if err != nil {
return nil, err
}
return b, nil
}
func (ad CC) Decode(data []byte) (interface{}, error) {
debited := CC{}
err := json.Unmarshal(data, &debited)
if err != nil {
return nil, err
}
return debited, nil
}
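// Each event doubles as its own encoding.Encoder: Encode JSON-marshals any
// compatible value and Decode unmarshals back into the concrete type. A quick
// round-trip sketch (field values are illustrative only):
//
//	evt := CC{ID: `1`, Type: `cc`, CCC: `ccc`, Timestamp: time.Now().UnixNano() / 1e6}
//	b, _ := CC{}.Encode(evt)
//	decoded, _ := CC{}.Decode(b) // interface{} holding a CC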
<|start_filename|>kstream/store/indexed_bench_test.go<|end_filename|>
package store
import (
"math/rand"
"strconv"
"strings"
"testing"
)
func BenchmarkHashIndex_Write(b *testing.B) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := index.Write(strconv.Itoa(rand.Intn(100000)+1), `111,222`); err != nil {
b.Error(err)
}
}
})
}
func BenchmarkHashIndex_Read(b *testing.B) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
for i := 1; i < 1000; i++ {
if err := index.Write(strconv.Itoa(i), `111,222`); err != nil {
b.Error(err)
}
}
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := index.Read(`111`); err != nil {
b.Error(err)
}
}
})
}
<|start_filename|>examples/example_1/stream/customer_profile_global_table.go<|end_filename|>
package stream
import (
"github.com/tryfix/kstream/examples/example_1/encoders"
kstream "github.com/tryfix/kstream/kstream"
)
func initCustomerProfileTable(builder *kstream.StreamBuilder) kstream.GlobalTable {
return builder.GlobalTable(
`customer_profile`,
encoders.KeyEncoder,
encoders.CustomerProfileUpdatedEncoder,
`customer_profile_store`)
}
<|start_filename|>kstream/store/mock_store.go<|end_filename|>
package store
import (
"context"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/kstream/encoding"
"time"
)
type MockStore struct {
name string
backend backend.Backend
kEncoder encoding.Encoder
vEncoder encoding.Encoder
}
type MockRecord struct {
ctx context.Context
key interface{}
value interface{}
expiry time.Duration
}
func NewMockStore(name string, kEncode encoding.Encoder, vEncoder encoding.Encoder, backend backend.Backend, records ...MockRecord) Store {
store := &MockStore{
name: name,
kEncoder: kEncode,
vEncoder: vEncoder,
backend: backend,
}
for _, record := range records {
if err := store.Set(record.ctx, record.key, record.value, record.expiry); err != nil {
panic(err)
}
}
return store
}
func (s *MockStore) Name() string {
return s.name
}
func (s *MockStore) Backend() backend.Backend {
return s.backend
}
func (s *MockStore) KeyEncoder() encoding.Encoder {
return s.kEncoder
}
func (s *MockStore) ValEncoder() encoding.Encoder {
return s.vEncoder
}
func (s *MockStore) Set(ctx context.Context, key interface{}, value interface{}, expiry time.Duration) error {
k, err := s.kEncoder.Encode(key)
if err != nil {
return err
}
v, err := s.ValEncoder().Encode(value)
if err != nil {
return err
}
return s.backend.Set(k, v, expiry)
}
func (s *MockStore) Get(ctx context.Context, key interface{}) (value interface{}, err error) {
k, err := s.kEncoder.Encode(key)
if err != nil {
return nil, err
}
v, err := s.backend.Get(k)
if err != nil {
return nil, err
}
if v == nil {
return nil, nil
}
val, err := s.vEncoder.Decode(v)
if err != nil {
return nil, err
}
return val, nil
}
func (*MockStore) GetRange(ctx context.Context, fromKey interface{}, toKey interface{}) (map[interface{}]interface{}, error) {
panic("implement me")
}
func (*MockStore) GetAll(ctx context.Context) (Iterator, error) {
panic("implement me")
}
func (s *MockStore) Delete(ctx context.Context, key interface{}) error {
k, err := s.kEncoder.Encode(key)
if err != nil {
return err
}
return s.backend.Delete(k)
}
func (s *MockStore) String() string {
return s.name
}
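// A minimal usage sketch (hypothetical wiring, not part of this package): back a
// MockStore with the in-memory backend and do a set/get round trip. The store
// name and keys below are illustrative only.
//
//	conf := memory.NewConfig()
//	conf.Logger = log.NewNoopLogger()
//	conf.MetricsReporter = metrics.NoopReporter()
//	bk := memory.NewMemoryBackend(conf)
//	st := NewMockStore(`foo_store`, encoding.StringEncoder{}, encoding.StringEncoder{}, bk)
//	_ = st.Set(context.Background(), `k1`, `v1`, 0)
//	v, _ := st.Get(context.Background(), `k1`) // v holds `v1`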
<|start_filename|>kstream/changelog/state_changelog_test.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package changelog
import "testing"
func TestStateChangelog_ReadAll(t *testing.T) {
}
func TestStateChangelog_Put(t *testing.T) {
}
<|start_filename|>kstream/processors/join/stream_joiner.go<|end_filename|>
package join
import (
"context"
"github.com/tryfix/kstream/kstream/topology"
)
type StreamJoiner struct {
Id int32
childs []topology.Node
childBuilders []topology.NodeBuilder
}
func (j *StreamJoiner) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range j.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &StreamJoiner{
childs: childs,
Id: j.Id,
}, nil
}
func (j *StreamJoiner) ChildBuilders() []topology.NodeBuilder {
return j.childBuilders
}
func (j *StreamJoiner) AddChildBuilder(builder topology.NodeBuilder) {
j.childBuilders = append(j.childBuilders, builder)
}
func (j *StreamJoiner) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error) {
for _, child := range j.childs {
_, _, _, err := child.Run(ctx, kIn, vIn)
if err != nil {
return nil, nil, false, err
}
}
return kIn, vIn, true, nil
}
func (j *StreamJoiner) Childs() []topology.Node {
return j.childs
}
func (j *StreamJoiner) AddChild(node topology.Node) {
j.childs = append(j.childs, node)
}
func (j *StreamJoiner) Next() bool {
return true
}
func (j *StreamJoiner) Type() topology.Type {
return topology.Type(`stream_joiner`)
}
func (j *StreamJoiner) Name() string {
return `stream_joiner`
}
func (j *StreamJoiner) ID() int32 {
return j.Id
}
//type StreamJoinEncoder struct {
// typ reflect.Type
//}
//
//func (s *StreamJoinEncoder) Encode(data interface{}) ([]byte, error) {
// s.typ = reflect.TypeOf(data)
// var buf bytes.Buffer
// enc := gob.NewEncoder(&buf)
// err := enc.Encode(data)
// if err != nil {
// return nil, err
// }
// return buf.Bytes(), nil
//}
//
//func (s *StreamJoinEncoder) Decode(data []byte) (interface{}, error) {
// decoded := reflect.New(s.typ)
// buf := bytes.NewBuffer(data)
// dec := gob.NewDecoder(buf)
// err := dec.Decode(decoded)
// if err != nil {
// return decoded.Interface(),err
// }
// return decoded.Interface(), nil
//}
<|start_filename|>examples/example_2/events/ab.go<|end_filename|>
package events
import "encoding/json"
type AB struct {
ID string `json:"id"`
Type string `json:"type"`
AAA string `json:"aaa"`
BBB string `json:"bbb"`
TimestampA int64 `json:"timestamp_a"`
TimestampB int64 `json:"timestamp_b"`
}
func (a AB) Encode(data interface{}) ([]byte, error) {
b, err := json.Marshal(data)
if err != nil {
return nil, err
}
return b, nil
}
func (a AB) Decode(data []byte) (interface{}, error) {
ac := AB{}
err := json.Unmarshal(data, &ac)
if err != nil {
return nil, err
}
return ac, nil
}
<|start_filename|>consumer/mock_partition_consumer.go<|end_filename|>
package consumer
import (
"github.com/google/uuid"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/offsets"
"log"
"time"
)
type mockPartitionConsumer struct {
topics *admin.Topics
offsets offsets.Manager
fetchInterval time.Duration
closing chan bool
closed chan bool
fetchBatchSize int
events chan Event
}
func NewMockPartitionConsumer(topics *admin.Topics, offsets offsets.Manager) *mockPartitionConsumer {
return &mockPartitionConsumer{
topics: topics,
fetchInterval: 100 * time.Microsecond,
//fetchInterval: 1 * time.Second,
fetchBatchSize: 1000,
closed: make(chan bool, 1),
closing: make(chan bool, 1),
offsets: offsets,
events: make(chan Event, 100),
}
}
func (m *mockPartitionConsumer) Consume(topic string, partition int32, offset Offset) (<-chan Event, error) {
go m.consume(topic, partition, offset)
return m.events, nil
}
func (m *mockPartitionConsumer) consume(topic string, partition int32, offset Offset) {
pt := m.topics.Topics()[topic].Partitions()[int(partition)]
var currentOffset = int64(offset)
if offset == -1 {
currentOffset = pt.Latest() + 1
}
LOOP:
for {
select {
case <-m.closing:
break LOOP
default:
}
time.Sleep(m.fetchInterval)
records, err := pt.Fetch(currentOffset, m.fetchBatchSize)
if err != nil {
log.Fatal(err)
}
if len(records) < 1 {
m.events <- &PartitionEnd{}
continue
}
partitionEnd, err := m.offsets.GetOffsetLatest(topic, partition)
if err != nil {
log.Fatal(err)
}
for _, msg := range records {
m.events <- &data.Record{
Key: msg.Key,
Value: msg.Value,
Offset: msg.Offset,
Topic: msg.Topic,
Partition: msg.Partition,
Timestamp: msg.Timestamp,
UUID: uuid.New(),
Headers: msg.Headers,
}
//if highWatermark == 0 || highWatermark-1 == msg.Offset {
if msg.Offset == partitionEnd {
m.events <- &PartitionEnd{}
}
}
currentOffset = records[len(records)-1].Offset + 1
}
m.closed <- true
}
func (m *mockPartitionConsumer) Errors() <-chan *Error {
return make(chan *Error)
}
func (m *mockPartitionConsumer) Close() error {
m.closing <- true
<-m.closed
close(m.events)
return nil
}
func (m *mockPartitionConsumer) Id() string {
panic("implement me")
}
<|start_filename|>backend/memory/memory_test.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package memory
import (
"fmt"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"testing"
"time"
)
func TestMemory_Set_Expiry(t *testing.T) {
conf := NewConfig()
conf.ExpiredRecordCleanupInterval = 1 * time.Millisecond
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
backend := NewMemoryBackend(conf)
if err := backend.Set([]byte(`100`), []byte(`100`), 10*time.Millisecond); err != nil {
log.Fatal(err)
}
time.Sleep(1 * time.Second)
r, err := backend.Get([]byte(`100`))
if err != nil {
t.Error(err)
}
if r != nil {
t.Error(`record exist`)
}
}
func TestMemory_Get(t *testing.T) {
conf := NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
backend := NewMemoryBackend(conf)
for i := 1; i <= 1000; i++ {
if err := backend.Set([]byte(fmt.Sprint(i)), []byte(`100`), 0); err != nil {
t.Fatal(err)
}
}
for i := 1; i <= 1000; i++ {
val, err := backend.Get([]byte(fmt.Sprint(i)))
if err != nil {
t.Error(err)
}
if string(val) != `100` {
t.Fail()
}
}
}
func TestMemory_Delete(t *testing.T) {
conf := NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
backend := NewMemoryBackend(conf)
if err := backend.Set([]byte(`100`), []byte(`100`), 0); err != nil {
t.Fatal(err)
}
if err := backend.Delete([]byte(`100`)); err != nil {
t.Fatal(err)
}
val, err := backend.Get([]byte(`100`))
if err != nil {
t.Error(err)
}
if val != nil {
t.Fail()
}
}
<|start_filename|>consumer/mock_partition_consumer_test.go<|end_filename|>
package consumer
import (
"fmt"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/offsets"
"testing"
)
func TestMockPartitionConsumer_Consume(t *testing.T) {
mocksTopics := admin.NewMockTopics()
kafkaAdmin := &admin.MockKafkaAdmin{
Topics: mocksTopics,
}
if err := kafkaAdmin.CreateTopics(map[string]*admin.Topic{
`tp1`: {
Name: "tp1",
NumPartitions: 1,
ReplicationFactor: 1,
},
}); err != nil {
t.Error(err)
}
tp, _ := mocksTopics.Topic(`tp1`)
pt, _ := tp.Partition(0)
t.Run(`ZeroMessage`, func(t *testing.T) {
con := NewMockPartitionConsumer(mocksTopics, &offsets.MockManager{Topics: mocksTopics})
ch, err := con.Consume(`tp1`, 0, Earliest)
if err != nil {
t.Error(err)
}
var count int
L:
for msg := range ch {
if _, ok := msg.(*PartitionEnd); ok {
break L
}
count++
}
if count != 0 {
t.Error(`expected 0 have `, count)
t.Fail()
}
})
for i := 1; i <= 3333; i++ {
err := pt.Append(&data.Record{
Key: []byte(fmt.Sprint(i)),
Value: []byte(`v`),
Topic: "tp1",
})
if err != nil {
t.Error(err)
}
}
t.Run(`Earliest`, func(t *testing.T) {
con := NewMockPartitionConsumer(mocksTopics, &offsets.MockManager{Topics: mocksTopics})
ch, err := con.Consume(`tp1`, 0, Earliest)
if err != nil {
t.Error(err)
}
var count int
L:
for msg := range ch {
if _, ok := msg.(*PartitionEnd); ok {
break L
}
count++
}
if count != 3333 {
t.Error(`expected 3333 have `, count)
t.Fail()
}
})
t.Run(`Latest`, func(t *testing.T) {
con := NewMockPartitionConsumer(mocksTopics, &offsets.MockManager{Topics: mocksTopics})
ch, err := con.Consume(`tp1`, 0, Latest)
if err != nil {
t.Error(err)
}
var count int
L:
for msg := range ch {
if _, ok := msg.(*PartitionEnd); ok {
break L
}
count++
}
if count != 0 {
t.Error(`expected 0 have `, count)
t.Fail()
}
})
}
<|start_filename|>consumer/mock_consumer.go<|end_filename|>
package consumer
import (
"context"
"github.com/google/uuid"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/offsets"
"log"
"sync"
"time"
)
type MockConsumerBuilder struct {
Builder
topics *admin.Topics
}
func NewMockConsumerBuilder(topics *admin.Topics) Builder {
return &MockConsumerBuilder{
Builder: NewBuilder(),
topics: topics,
}
}
func (mb *MockConsumerBuilder) Build(options ...BuilderOption) (Consumer, error) {
return NewMockConsumer(mb.topics), nil
}
type MockPartitionConsumerBuilder struct {
PartitionConsumerBuilder
offsets offsets.Manager
topics *admin.Topics
}
func NewMockPartitionConsumerBuilder(topics *admin.Topics, offsets offsets.Manager) PartitionConsumerBuilder {
return &MockPartitionConsumerBuilder{
PartitionConsumerBuilder: NewPartitionConsumerBuilder(),
topics: topics,
offsets: offsets,
}
}
func (mb *MockPartitionConsumerBuilder) Build(options ...BuilderOption) (PartitionConsumer, error) {
return NewMockPartitionConsumer(mb.topics, mb.offsets), nil
}
type mockConsumer struct {
topics *admin.Topics
wg *sync.WaitGroup
fetchInterval time.Duration
fetchBatchSize int
partitions chan Partition
closing chan bool
InitialOffset Offset
}
func NewMockConsumer(topics *admin.Topics) *mockConsumer {
return &mockConsumer{
topics: topics,
fetchInterval: 100 * time.Microsecond,
fetchBatchSize: 50,
wg: new(sync.WaitGroup),
InitialOffset: Earliest,
closing: make(chan bool, 1),
}
}
func (m *mockConsumer) Consume(topics []string, handler ReBalanceHandler) (chan Partition, error) {
tps := make(map[string]*mockConsumerPartition)
var assigned []TopicPartition
for _, topic := range topics {
tp, err := m.topics.Topic(topic)
if err != nil {
return nil, err
}
for p := range tp.Partitions() {
tp := TopicPartition{
Topic: topic,
Partition: int32(p),
}
assigned = append(assigned, tp)
}
}
if err := handler.OnPartitionAssigned(context.Background(), assigned); err != nil {
return nil, err
}
m.partitions = make(chan Partition, len(assigned))
for _, tp := range assigned {
consumerPartition := &mockConsumerPartition{
tp: tp,
records: make(chan *data.Record, 10000),
}
tps[tp.String()] = consumerPartition
m.partitions <- consumerPartition
m.wg.Add(1)
go m.consume(consumerPartition)
}
return m.partitions, nil
}
func (m *mockConsumer) Errors() <-chan *Error {
return make(chan *Error, 1) // make a bidirectional channel; it converts implicitly to the receive-only return type
}
func (m *mockConsumer) Close() error {
m.closing <- true
m.wg.Wait()
close(m.partitions)
return nil
}
func (m *mockConsumer) consume(partition *mockConsumerPartition) {
pt := m.topics.Topics()[partition.tp.Topic].Partitions()[partition.tp.Partition]
offset := int64(m.InitialOffset)
LOOP:
for {
select {
case <-m.closing:
break LOOP
default:
}
time.Sleep(m.fetchInterval)
records, err := pt.Fetch(offset, m.fetchBatchSize)
if err != nil {
log.Fatal(err)
}
if len(records) < 1 {
continue
}
for _, msg := range records {
partition.records <- &data.Record{
Key: msg.Key,
Value: msg.Value,
Offset: msg.Offset,
Topic: msg.Topic,
Partition: msg.Partition,
Timestamp: msg.Timestamp,
UUID: uuid.New(),
Headers: msg.Headers,
}
}
offset = records[len(records)-1].Offset + 1
}
close(partition.records)
m.wg.Done()
}
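// Usage sketch (topic is illustrative; noopHandler stands for any hypothetical
// ReBalanceHandler whose OnPartitionAssigned/OnPartitionRevoked simply return nil):
//
//	topics := admin.NewMockTopics()
//	kafkaAdmin := &admin.MockKafkaAdmin{Topics: topics}
//	_ = kafkaAdmin.CreateTopics(map[string]*admin.Topic{
//		`tp1`: {Name: `tp1`, NumPartitions: 1, ReplicationFactor: 1},
//	})
//	con := NewMockConsumer(topics)
//	partitions, _ := con.Consume([]string{`tp1`}, noopHandler{})
//	for p := range partitions {
//		go func(pt Partition) {
//			for rec := range pt.Records() {
//				fmt.Println(rec.Topic, rec.Offset)
//			}
//		}(p)
//	}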
<|start_filename|>kstream/store/index.go<|end_filename|>
package store
import (
"fmt"
"sync"
)
type index struct {
indexes map[interface{}]map[interface{}]bool // indexKey:recordKey:bool
mapper func(key, val interface{}) (idx interface{})
mu *sync.Mutex
name string
}
func NewIndex(name string, mapper func(key, val interface{}) (idx interface{})) Index {
return &index{
indexes: make(map[interface{}]map[interface{}]bool),
mapper: mapper,
mu: new(sync.Mutex),
name: name,
}
}
func (s *index) String() string {
return s.name
}
func (s *index) Write(key, value interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
hashKey := s.mapper(key, value)
_, ok := s.indexes[hashKey]
if !ok {
s.indexes[hashKey] = make(map[interface{}]bool)
}
s.indexes[hashKey][key] = true
return nil
}
func (s *index) ValueIndexed(index, value interface{}) (bool, error) {
// lock like the other accessors to avoid racing Write/Delete
s.mu.Lock()
defer s.mu.Unlock()
_, ok := s.indexes[index]
if !ok {
return false, nil
}
_, ok = s.indexes[index][value]
return ok, nil
}
func (s *index) Hash(key, val interface{}) (hash interface{}) {
return s.mapper(key, val)
}
func (s *index) WriteHash(hash, key interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
_, ok := s.indexes[hash]
if !ok {
s.indexes[hash] = make(map[interface{}]bool)
}
s.indexes[hash][key] = true
return nil
}
func (s *index) Delete(key, value interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
hashKey := s.mapper(key, value)
if _, ok := s.indexes[hashKey]; !ok {
return fmt.Errorf(`hashKey [%v] does not exist for [%s]`, hashKey, s.name)
}
delete(s.indexes[hashKey], key)
return nil
}
func (s *index) Keys() []interface{} {
s.mu.Lock()
defer s.mu.Unlock()
var keys []interface{}
for key := range s.indexes {
keys = append(keys, key)
}
return keys
}
func (s *index) Values() map[interface{}][]interface{} {
s.mu.Lock()
defer s.mu.Unlock()
values := make(map[interface{}][]interface{})
for idx, keys := range s.indexes {
for key := range keys {
values[idx] = append(values[idx], key)
}
}
return values
}
func (s *index) Read(key interface{}) ([]interface{}, error) {
s.mu.Lock()
defer s.mu.Unlock()
var indexes []interface{}
index, ok := s.indexes[key]
if !ok {
return nil, UnknownIndex
}
for k := range index {
indexes = append(indexes, k)
}
return indexes, nil
}
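// Usage sketch (mapper and values are illustrative): the mapper derives an index
// key from a record, Write files the record key under it, and Read returns every
// record key filed under a given index key.
//
//	idx := NewIndex(`by_prefix`, func(key, val interface{}) interface{} {
//		return strings.Split(val.(string), `,`)[0]
//	})
//	_ = idx.Write(`k1`, `111,222`)
//	_ = idx.Write(`k2`, `111,333`)
//	keys, _ := idx.Read(`111`) // contains `k1` and `k2`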
<|start_filename|>kstream/context/context_test.go<|end_filename|>
package context
import (
"context"
"github.com/tryfix/kstream/data"
"reflect"
"testing"
)
func TestFromRecord(t *testing.T) {
//type args struct {
// parent context.Context
// record *data.Record
//}
//
//ctx := context.WithValue(context.Background(), `foo`, `bar`)
//kafkaRec := &data.Record{
// Key: nil,
// Value: nil,
// Topic: "test",
// Partition: 0,
// Offset: 0,
// Timestamp: time.Time{},
// BlockTimestamp: time.Time{},
// RecordHeaders: nil,
// UUID: uuid.UUID{},
//}
//
//tests := []struct {
// name string
// args args
// want context.Context
//}{
// {name: `default`, args: args{
// parent: ctx,
// record: nil,
// }, want: Context{}},
//}
//for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// if got := FromRecord(tt.args.parent, tt.args.record); !reflect.DeepEqual(got, tt.want) {
// t.Errorf("FromRecord() = %v, want %v", got, tt.want)
// }
// })
//}
}
func TestMeta(t *testing.T) {
type args struct {
ctx context.Context
}
tests := []struct {
name string
args args
want *RecordMeta
}{
// TODO: Add test cases.
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := Meta(tt.args.ctx); !reflect.DeepEqual(got, tt.want) {
t.Errorf("Meta() = %v, want %v", got, tt.want)
}
})
}
}
func TestRecordFromContext(t *testing.T) {
type args struct {
ctx context.Context
key []byte
val []byte
}
tests := []struct {
name string
args args
want *data.Record
wantErr bool
}{
// TODO: Add test cases.
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := RecordFromContext(tt.args.ctx, tt.args.key, tt.args.val)
if (err != nil) != tt.wantErr {
t.Errorf("RecordFromContext() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("RecordFromContext() got = %v, want %v", got, tt.want)
}
})
}
}
<|start_filename|>kstream/encoding/encoder.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package encoding
type Builder func() Encoder
type Encoder interface {
Encode(data interface{}) ([]byte, error)
Decode(data []byte) (interface{}, error)
}
<|start_filename|>kstream/encoding/string_encoder.go<|end_filename|>
package encoding
import (
"github.com/tryfix/errors"
"reflect"
)
type StringEncoder struct{}
func (s StringEncoder) Encode(v interface{}) ([]byte, error) {
str, ok := v.(string)
if !ok {
return nil, errors.Errorf(`invalid type [%+v] expected string`, reflect.TypeOf(v))
}
return []byte(str), nil
}
func (s StringEncoder) Decode(data []byte) (interface{}, error) {
return string(data), nil
}
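// Round-trip sketch: Encode checks the value is a string and returns its bytes;
// Decode wraps the bytes back into a string.
//
//	enc := StringEncoder{}
//	b, _ := enc.Encode(`hello`) // []byte(`hello`)
//	s, _ := enc.Decode(b)       // interface{} holding `hello`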
<|start_filename|>examples/example_1/encoders/encoders.go<|end_filename|>
package encoders
import (
"github.com/tryfix/kstream/examples/example_1/events"
"github.com/tryfix/kstream/kstream/encoding"
)
var KeyEncoder = func() encoding.Encoder { return Int64Encoder{} }
var UuidKeyEncoder = func() encoding.Encoder { return UuidEncoder{} }
var TransactionReceivedEncoder = func() encoding.Encoder { return TransactionEncoder{} }
var AccountCreditedEncoder = func() encoding.Encoder { return events.AccountCredited{} }
var AccountDebitedEncoder = func() encoding.Encoder { return events.AccountDebited{} }
var AccountDetailsUpdatedEncoder = func() encoding.Encoder { return events.AccountDetailsUpdated{} }
var CustomerProfileUpdatedEncoder = func() encoding.Encoder { return events.CustomerProfileUpdated{} }
var MessageEncoder = func() encoding.Encoder { return events.MessageCreated{} }
<|start_filename|>consumer/builder.go<|end_filename|>
package consumer
import (
"github.com/tryfix/log"
"github.com/tryfix/metrics"
)
type BuilderOption func(config *Config)
func BuilderWithId(id string) BuilderOption {
return func(config *Config) {
config.Id = id
}
}
func BuilderWithOptions(options ...Option) BuilderOption {
return func(config *Config) {
config.options.apply(options...)
}
}
func BuilderWithGroupId(id string) BuilderOption {
return func(config *Config) {
config.GroupId = id
}
}
func BuilderWithMetricsReporter(reporter metrics.Reporter) BuilderOption {
return func(config *Config) {
config.MetricsReporter = reporter
}
}
func BuilderWithLogger(logger log.Logger) BuilderOption {
return func(config *Config) {
config.Logger = logger
}
}
type Builder interface {
Config() *Config
Build(options ...BuilderOption) (Consumer, error)
}
type builder struct {
config *Config
}
func NewBuilder() Builder {
return &builder{
config: NewConsumerConfig(),
}
}
func (b *builder) Config() *Config {
return b.config
}
func (b *builder) Build(options ...BuilderOption) (Consumer, error) {
conf := *b.config
for _, option := range options {
option(&conf)
}
return NewConsumer(&conf)
}
type PartitionConsumerBuilder interface {
Config() *Config
Build(options ...BuilderOption) (PartitionConsumer, error)
}
type partitionConsumerBuilder struct {
config *Config
}
func NewPartitionConsumerBuilder() PartitionConsumerBuilder {
return &partitionConsumerBuilder{
config: NewConsumerConfig(),
}
}
func (b *partitionConsumerBuilder) Config() *Config {
return b.config // &*b.config was a no-op; the builder hands out its own config pointer
}
func (b *partitionConsumerBuilder) Configure(c *Config) PartitionConsumerBuilder {
return &partitionConsumerBuilder{
config: c,
}
}
func (b *partitionConsumerBuilder) Build(options ...BuilderOption) (PartitionConsumer, error) {
conf := *b.config
for _, option := range options {
option(&conf)
}
return NewPartitionConsumer(&conf)
}
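// Usage sketch (group id and servers are illustrative): defaults come from
// NewConsumerConfig; per-build overrides are applied through BuilderOptions.
//
//	b := NewBuilder()
//	b.Config().BootstrapServers = []string{`localhost:9092`}
//	con, err := b.Build(
//		BuilderWithGroupId(`example_group`),
//		BuilderWithLogger(log.NewNoopLogger()),
//	)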
<|start_filename|>kstream/encoding/int_encoder_test.go<|end_filename|>
package encoding
import (
"reflect"
"testing"
)
func TestIntEncoder_Decode(t *testing.T) {
type args struct {
data []byte
}
tests := []struct {
name string
args args
want interface{}
wantErr bool
}{
{name: `should_decode`, args: args{data: []byte(`1`)}, want: 1, wantErr: false},
{name: `should_return_error`, args: args{data: []byte(`ss`)}, want: nil, wantErr: true},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
in := IntEncoder{}
got, err := in.Decode(tt.args.data)
if (err != nil) != tt.wantErr {
t.Errorf("Decode() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("Decode() got = %v, want %v", got, tt.want)
}
})
}
}
func TestIntEncoder_Encode(t *testing.T) {
type args struct {
v interface{}
}
tests := []struct {
name string
args args
want []byte
wantErr bool
}{
{name: `should_decode`, args: args{100}, want: []byte(`100`), wantErr: false},
{name: `should_return_error`, args: args{nil}, want: nil, wantErr: true},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
in := IntEncoder{}
got, err := in.Encode(tt.args.v)
if (err != nil) != tt.wantErr {
t.Errorf("Encode() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("Encode() got = %v, want %v", got, tt.want)
}
})
}
}
<|start_filename|>examples/example_1/events/account_details_updated.go<|end_filename|>
package events
import (
"encoding/json"
"github.com/google/uuid"
)
type AccountDetailsUpdated struct {
ID string `json:"id"`
Type string `json:"type"`
Body struct {
AccountNo int64 `json:"account_no"`
AccountType string `json:"account_type"`
CustomerID uuid.UUID `json:"customer_id"`
Branch string `json:"branch"`
BranchCode int `json:"branch_code"`
UpdatedAt int64 `json:"updated_at"`
} `json:"body"`
Timestamp int64 `json:"timestamp"`
}
func (a AccountDetailsUpdated) Encode(data interface{}) ([]byte, error) {
b, err := json.Marshal(data)
if err != nil {
return nil, err
}
return b, nil
}
func (a AccountDetailsUpdated) Decode(data []byte) (interface{}, error) {
ad := AccountDetailsUpdated{}
err := json.Unmarshal(data, &ad)
if err != nil {
return nil, err
}
return ad, nil
}
<|start_filename|>examples/example_2/stream/a.go<|end_filename|>
package stream
import (
"context"
"fmt"
"github.com/tryfix/kstream/examples/example_2/events"
kstream "github.com/tryfix/kstream/kstream"
"github.com/tryfix/kstream/kstream/branch"
"github.com/tryfix/log"
"time"
)
type AStream struct {
Upstream kstream.Stream
}
func (ac AStream) Init() {
branches := ac.Upstream.Branch([]branch.Details{
{
Name: `a_branch`,
Predicate: func(ctx context.Context, key interface{}, val interface{}) (b bool, e error) {
_, ok := val.(events.AA)
return ok, nil
},
},
{
Name: `b_branch`,
Predicate: func(ctx context.Context, key interface{}, val interface{}) (b bool, e error) {
_, ok := val.(events.BB)
return ok, nil
},
},
{
Name: `c_branch`,
Predicate: func(ctx context.Context, key interface{}, val interface{}) (b bool, e error) {
_, ok := val.(events.CC)
return ok, nil
},
},
})
filteredAStream := branches[0].
Filter(ac.filterAFromTimestamp).
Process(func(ctx context.Context, key, value interface{}) error {
a := value.(events.AA)
log.Info(fmt.Sprintf(`a stream received with key %v, and value %+v`, key, a))
return nil
})
filteredBStream := branches[1].
Filter(ac.filterBFromTimestamp).
Process(func(ctx context.Context, key, value interface{}) error {
b := value.(events.BB)
log.Info(fmt.Sprintf(`b stream received with key %v, and value %+v`, key, b))
return nil
})
filteredCStream := branches[2].
Filter(ac.filterCFromTimestamp).
Process(func(ctx context.Context, key, value interface{}) error {
c := value.(events.CC)
log.Info(fmt.Sprintf(`c stream received with key %v, and value %+v`, key, c))
return nil
})
ABJoinedStream := filteredAStream.JoinStream(filteredBStream, func(left, right interface{}) (joined interface{}, err error) {
a := left.(events.AA)
b := right.(events.BB)
return events.AB{
ID: a.ID,
Type: "AB",
AAA: a.AAA,
BBB: b.BBB,
TimestampA: a.Timestamp,
TimestampB: b.Timestamp,
}, nil
})
ABJoinedProcessedStream := ABJoinedStream.Process(func(ctx context.Context, key, value interface{}) error {
ab := value.(events.AB)
log.Info(fmt.Sprintf(`joined ab received with key %v, and value %+v`, key, ab))
return nil
})
ABCJoinedStream := ABJoinedProcessedStream.JoinStream(filteredCStream, func(left, right interface{}) (joined interface{}, err error) {
ab := left.(events.AB)
c := right.(events.CC)
return events.ABC{
ID: ab.ID,
Type: "ABC",
AAA: ab.AAA,
BBB: ab.BBB,
CCC: c.CCC,
TimestampA: ab.TimestampA,
TimestampB: ab.TimestampB,
TimestampC: c.Timestamp,
}, nil
})
ABCJoinedStream.Process(func(ctx context.Context, key, value interface{}) error {
abc := value.(events.ABC)
log.Info(fmt.Sprintf(`joined abc received with key %v, and value %+v`, key, abc))
return nil
})
}
func (ac AStream) filterAFromTimestamp(ctx context.Context, key, value interface{}) (b bool, e error) {
evt, _ := value.(events.AA)
if time.Now().UnixNano()/1e6-evt.Timestamp > 300000 {
return false, nil
}
return true, nil
}
func (ac AStream) filterBFromTimestamp(ctx context.Context, key, value interface{}) (b bool, e error) {
evt, _ := value.(events.BB)
if time.Now().UnixNano()/1e6-evt.Timestamp > 300000 {
return false, nil
}
return true, nil
}
func (ac AStream) filterCFromTimestamp(ctx context.Context, key, value interface{}) (b bool, e error) {
evt, _ := value.(events.CC)
if time.Now().UnixNano()/1e6-evt.Timestamp > 300000 {
return false, nil
}
return true, nil
}
<|start_filename|>kstream/context/context.go<|end_filename|>
package context
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/data"
"github.com/tryfix/traceable-context"
"time"
)
var recordMeta = `rc_meta`
type RecordMeta struct {
Topic string
Partition int32
Offset int64
Timestamp time.Time
Headers data.RecordHeaders
}
type Context struct {
context.Context
}
func FromRecord(parent context.Context, record *data.Record) context.Context {
return traceable_context.WithValue(parent, &recordMeta, &RecordMeta{
Topic: record.Topic,
Offset: record.Offset,
Partition: record.Partition,
Timestamp: record.Timestamp,
Headers: record.Headers,
})
}
func RecordFromContext(ctx context.Context, key []byte, val []byte) (*data.Record, error) {
if c, ok := ctx.(*Context); ok {
meta := Meta(c)
return &data.Record{
Topic: meta.Topic,
Partition: meta.Partition,
Offset: meta.Offset,
Timestamp: meta.Timestamp,
Key: key,
Value: val,
Headers: meta.Headers,
}, nil
}
return nil, errors.New(`invalid context expected [k-stream.context]`)
}
func Meta(ctx context.Context) *RecordMeta {
if meta, ok := ctx.Value(&recordMeta).(*RecordMeta); ok {
return meta
}
panic(`k-stream.context meta not available`)
}
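// Usage sketch (record fields are illustrative): FromRecord stashes the record
// metadata in the context; Meta retrieves it downstream and panics if absent.
//
//	rec := &data.Record{Topic: `tp1`, Partition: 0, Offset: 42, Timestamp: time.Now()}
//	ctx := FromRecord(context.Background(), rec)
//	meta := Meta(ctx)
//	fmt.Println(meta.Topic, meta.Offset) // tp1 42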
<|start_filename|>kstream/store/iterator.go<|end_filename|>
package store
import (
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/kstream/encoding"
)
type Iterator interface {
SeekToFirst()
SeekToLast()
Seek(key interface{}) error
Next()
Prev()
Close()
Key() (interface{}, error)
Value() (interface{}, error)
Valid() bool
Error() error
}
type iterator struct {
iterator backend.Iterator
keyEncoder encoding.Encoder
valEncoder encoding.Encoder
}
func (i *iterator) SeekToFirst() {
i.iterator.SeekToFirst()
}
func (i *iterator) SeekToLast() {
i.iterator.SeekToLast()
}
func (i *iterator) Seek(key interface{}) error {
k, err := i.keyEncoder.Encode(key)
if err != nil {
return err
}
i.iterator.Seek(k)
return nil
}
func (i *iterator) Next() {
i.iterator.Next()
}
func (i *iterator) Prev() {
i.iterator.Prev()
}
func (i *iterator) Close() {
i.iterator.Close()
}
func (i *iterator) Key() (interface{}, error) {
k := i.iterator.Key()
if len(k) < 1 {
return nil, nil
}
return i.keyEncoder.Decode(k)
}
func (i *iterator) Value() (interface{}, error) {
v := i.iterator.Value()
if len(v) < 1 {
return nil, nil
}
return i.valEncoder.Decode(v)
}
func (i *iterator) Valid() bool {
return i.iterator.Valid()
}
func (i *iterator) Error() error {
return i.iterator.Error()
}
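// Typical iteration sketch (assuming it came from a store's GetAll): keys and
// values are returned decoded through the store's key/value encoders.
//
//	for it.SeekToFirst(); it.Valid(); it.Next() {
//		k, _ := it.Key()
//		v, _ := it.Value()
//		fmt.Println(k, v)
//	}
//	it.Close()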
<|start_filename|>backend/memory/memory_bench_test.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package memory
import (
"fmt"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"math/rand"
"testing"
)
func BenchmarkMemory_Set(b *testing.B) {
conf := NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
backend := NewMemoryBackend(conf)
b.ResetTimer()
b.ReportAllocs()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := backend.Set([]byte(`100`), []byte(`100`), 0); err != nil {
log.Fatal(err)
}
}
})
}
func BenchmarkMemory_Get(b *testing.B) {
conf := NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
backend := NewMemoryBackend(conf)
numOfRecs := 1000000
for i := 1; i <= numOfRecs; i++ {
if err := backend.Set([]byte(fmt.Sprint(i)), []byte(`100`), 0); err != nil {
log.Fatal(err)
}
}
b.ResetTimer()
b.ReportAllocs()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
k := fmt.Sprint(rand.Intn(numOfRecs-1) + 1)
if _, err := backend.Get([]byte(k)); err != nil {
b.Error(err)
}
}
})
}
func BenchmarkMemory_GetSet(b *testing.B) {
conf := NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
backend := NewMemoryBackend(conf)
for i := 1; i <= 99999; i++ {
if err := backend.Set([]byte(fmt.Sprint(rand.Intn(1000)+1)), []byte(`100`), 0); err != nil {
log.Fatal(err)
}
}
b.ResetTimer()
go func() {
for {
if err := backend.Set([]byte(fmt.Sprint(rand.Intn(1000)+1)), []byte(`100`), 0); err != nil {
b.Fatal(err)
}
}
}()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := backend.Get([]byte(fmt.Sprint(rand.Intn(1000) + 1))); err != nil {
b.Fatal(err)
}
}
})
}
func BenchmarkMemory_Iterator(b *testing.B) {
conf := NewConfig()
conf.Logger = log.NewNoopLogger()
conf.MetricsReporter = metrics.NoopReporter()
backend := NewMemoryBackend(conf)
for i := 1; i <= 999999; i++ {
if err := backend.Set([]byte(fmt.Sprint(rand.Intn(999999)+1)), []byte(`100`), 0); err != nil {
log.Fatal(err)
}
}
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
i := backend.Iterator()
for i.Valid() {
if i.Error() != nil {
i.Next()
continue
}
i.Next()
}
}
})
}
<|start_filename|>admin/kafka_admin.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
// Package admin provides an interface for Kafka administrative operations.
package admin
import (
"fmt"
"github.com/Shopify/sarama"
"github.com/tryfix/errors"
"github.com/tryfix/log"
)
type Partition struct {
Id int32
Error error
}
type Topic struct {
Name string
Partitions []Partition
Error error
NumPartitions int32
ReplicationFactor int16
ReplicaAssignment map[int32][]int32
ConfigEntries map[string]string
}
// KafkaAdmin exposes administrative operations: fetching topic metadata and creating or deleting topics.
type KafkaAdmin interface {
FetchInfo(topics []string) (map[string]*Topic, error)
CreateTopics(topics map[string]*Topic) error
DeleteTopics(topics []string) (map[string]error, error)
Close()
}
type kafkaAdminOptions struct {
BootstrapServers []string
KafkaVersion sarama.KafkaVersion
Logger log.Logger
}
func (opts *kafkaAdminOptions) apply(options ...KafkaAdminOption) {
opts.KafkaVersion = sarama.V2_4_0_0
opts.Logger = log.NewNoopLogger()
for _, opt := range options {
opt(opts)
}
}
type KafkaAdminOption func(*kafkaAdminOptions)
func WithKafkaVersion(version sarama.KafkaVersion) KafkaAdminOption {
return func(options *kafkaAdminOptions) {
options.KafkaVersion = version
}
}
func WithLogger(logger log.Logger) KafkaAdminOption {
return func(options *kafkaAdminOptions) {
options.Logger = logger
}
}
type kafkaAdmin struct {
admin sarama.ClusterAdmin
logger log.Logger
}
func NewKafkaAdmin(bootstrapServer []string, options ...KafkaAdminOption) *kafkaAdmin {
opts := new(kafkaAdminOptions)
opts.apply(options...)
saramaConfig := sarama.NewConfig()
saramaConfig.Version = opts.KafkaVersion
logger := opts.Logger.NewLog(log.Prefixed(`kafka-admin`))
admin, err := sarama.NewClusterAdmin(bootstrapServer, saramaConfig)
if err != nil {
logger.Fatal(fmt.Sprintf(`cannot get controller - %+v`, err))
}
return &kafkaAdmin{
admin: admin,
logger: logger,
}
}
func (c *kafkaAdmin) FetchInfo(topics []string) (map[string]*Topic, error) {
topicInfo := make(map[string]*Topic)
topicMeta, err := c.admin.DescribeTopics(topics)
if err != nil {
return nil, errors.WithPrevious(err, `cannot get metadata : `)
}
for _, tp := range topicMeta {
var pts []Partition
for _, pt := range tp.Partitions {
pts = append(pts, Partition{
Id: pt.ID,
Error: pt.Err,
})
}
topicInfo[tp.Name] = &Topic{
Name: tp.Name,
Partitions: pts,
NumPartitions: int32(len(pts)),
}
if tp.Err != sarama.ErrNoError {
topicInfo[tp.Name].Error = tp.Err
}
// configs
confs, err := c.admin.DescribeConfig(sarama.ConfigResource{
Type: sarama.TopicResource,
Name: tp.Name,
ConfigNames: []string{`cleanup.policy`, `min.insync.replicas`, `retention.ms`},
})
if err != nil {
return nil, err
}
topicInfo[tp.Name].ConfigEntries = map[string]string{}
for _, co := range confs {
topicInfo[tp.Name].ConfigEntries[co.Name] = co.Value
}
}
return topicInfo, nil
}
func (c *kafkaAdmin) CreateTopics(topics map[string]*Topic) error {
for name, info := range topics {
details := &sarama.TopicDetail{
NumPartitions: info.NumPartitions,
ReplicationFactor: info.ReplicationFactor,
ReplicaAssignment: info.ReplicaAssignment,
}
details.ConfigEntries = map[string]*string{}
for cName, config := range info.ConfigEntries {
config := config // copy the range variable; taking &config directly would alias every entry (pre-Go 1.22)
details.ConfigEntries[cName] = &config
}
err := c.admin.CreateTopic(name, details, false)
if err != nil {
if e, ok := err.(*sarama.TopicError); ok && (e.Err == sarama.ErrTopicAlreadyExists || e.Err == sarama.ErrNoError) {
c.logger.Warn(err)
continue
}
return errors.WithPrevious(err, `could not create topic`)
}
}
c.logger.Info(`k-stream.kafkaAdmin`,
fmt.Sprintf(`kafkaAdmin topics created - %+v`, topics))
return nil
}
func (c *kafkaAdmin) DeleteTopics(topics []string) (map[string]error, error) {
for _, topic := range topics {
err := c.admin.DeleteTopic(topic)
if err != nil {
return nil, errors.WithPrevious(err, `could not delete topic :`)
}
}
return make(map[string]error), nil
}
func (c *kafkaAdmin) Close() {
if err := c.admin.Close(); err != nil {
c.logger.Warn(`k-stream.kafkaAdmin`,
fmt.Sprintf(`kafkaAdmin cannot close broker : %+v`, err))
}
}
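// Usage sketch (broker address and topic are illustrative): construct an admin,
// create a topic, and close the underlying cluster connection when done.
//
//	admin := NewKafkaAdmin([]string{`localhost:9092`}, WithKafkaVersion(sarama.V2_4_0_0))
//	defer admin.Close()
//	err := admin.CreateTopics(map[string]*Topic{
//		`tp1`: {Name: `tp1`, NumPartitions: 2, ReplicationFactor: 1},
//	})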
<|start_filename|>kstream/topology/topology.go<|end_filename|>
package topology
import "context"
type TopologyBuilder struct {
Source SourceBuilder
SourceNodeBuilder NodeBuilder
}
func (tb TopologyBuilder) Build() (Topology, error) {
topology := Topology{}
sourceNode, err := tb.SourceNodeBuilder.Build()
if err != nil {
return topology, err
}
source, err := tb.Source.Build()
if err != nil {
return topology, err
}
topology.SourceNode = sourceNode
topology.Source = source
return topology, nil
}
type Topology struct {
Source Source
SourceNode Node
}
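// Run first decodes the raw key/value through the topology Source and then hands
// the decoded pair to the source node chain; the decoded key/value are returned.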
func (t Topology) Run(ctx context.Context, kIn, vIn []byte) (kOut, vOut interface{}, err error) {
kOut, vOut, err = t.Source.Run(ctx, kIn, vIn)
if err != nil {
return nil, nil, err
}
_, _, _, err = t.SourceNode.Run(ctx, kOut, vOut)
if err != nil {
return nil, nil, err
}
return
}
<|start_filename|>kstream/store/uuid_hash_index.go<|end_filename|>
package store
import (
"fmt"
"github.com/google/uuid"
"github.com/tryfix/errors"
"reflect"
"sync"
)
type uuidHashIndex struct {
indexes map[uuid.UUID]map[interface{}]bool // indexKey:recordKey:bool
mapper func(key, val interface{}) (idx uuid.UUID)
mu *sync.Mutex
name string
}
func NewUuidHashIndex(name string, mapper func(key, val interface{}) (idx uuid.UUID)) Index {
return &uuidHashIndex{
indexes: make(map[uuid.UUID]map[interface{}]bool),
mapper: mapper,
mu: new(sync.Mutex),
name: name,
}
}
func (s *uuidHashIndex) String() string {
return s.name
}
func (s *uuidHashIndex) Write(key, value interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
hashKey := s.mapper(key, value)
_, ok := s.indexes[hashKey]
if !ok {
s.indexes[hashKey] = make(map[interface{}]bool)
}
s.indexes[hashKey][key] = true
return nil
}
func (s *uuidHashIndex) ValueIndexed(index, value interface{}) (bool, error) {
hKey, ok := index.(uuid.UUID)
if !ok {
return false, errors.Errorf(`unsupported hash type expected [uuid.UUID] given [%s]`, reflect.TypeOf(index))
}
s.mu.Lock()
defer s.mu.Unlock()
_, ok = s.indexes[hKey]
if !ok {
return false, nil
}
_, ok = s.indexes[hKey][value]
return ok, nil
}
func (s *uuidHashIndex) Hash(key, val interface{}) (hash interface{}) {
return s.mapper(key, val)
}
func (s *uuidHashIndex) WriteHash(hash, key interface{}) error {
hKey, ok := hash.(uuid.UUID)
if !ok {
return errors.Errorf(`unsupported hash type expected [uuid.UUID] given [%s]`, reflect.TypeOf(hash))
}
s.mu.Lock()
defer s.mu.Unlock()
_, ok = s.indexes[hKey]
if !ok {
s.indexes[hKey] = make(map[interface{}]bool)
}
s.indexes[hKey][key] = true
return nil
}
func (s *uuidHashIndex) Delete(key, value interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
hashKey := s.mapper(key, value)
if _, ok := s.indexes[hashKey]; !ok {
return fmt.Errorf(`hashKey [%s] does not exist for [%s]`, hashKey, s.name)
}
delete(s.indexes[hashKey], key)
return nil
}
func (s *uuidHashIndex) Keys() []interface{} {
s.mu.Lock()
defer s.mu.Unlock()
var keys []interface{}
for key := range s.indexes {
keys = append(keys, key)
}
return keys
}
func (s *uuidHashIndex) Values() map[interface{}][]interface{} {
s.mu.Lock()
defer s.mu.Unlock()
values := make(map[interface{}][]interface{})
for idx, keys := range s.indexes {
for key := range keys {
values[idx] = append(values[idx], key)
}
}
return values
}
func (s *uuidHashIndex) Read(key interface{}) ([]interface{}, error) {
uid, ok := key.(uuid.UUID)
if !ok {
return nil, errors.Errorf(`unsupported key type expected [uuid.UUID] given [%s]`, reflect.TypeOf(key))
}
s.mu.Lock()
defer s.mu.Unlock()
var indexes []interface{}
index, ok := s.indexes[uid]
if !ok {
return nil, UnknownIndex
}
for k := range index {
indexes = append(indexes, k)
}
return indexes, nil
}
<|start_filename|>consumer/config.go<|end_filename|>
package consumer
import (
"github.com/Shopify/sarama"
"github.com/tryfix/errors"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
)
type Config struct {
Id string
GroupId string
BootstrapServers []string
MetricsReporter metrics.Reporter
Logger log.Logger
options *consumerOptions
*sarama.Config
}
func (c *Config) validate() error {
if err := c.Config.Validate(); err != nil {
return err
}
if c.GroupId == `` {
return errors.New(`k-stream.consumer.Config: Consumer.groupId cannot be empty`)
}
if len(c.BootstrapServers) < 1 {
return errors.New(`k-stream.consumer.Config: Consumer.BootstrapServers cannot be empty`)
}
return nil
}
func NewConsumerConfig() *Config {
c := new(Config)
c.setDefaults()
return c
}
func (c *Config) setDefaults() {
c.Config = sarama.NewConfig()
c.Config.Version = sarama.V2_3_0_0
c.Consumer.Return.Errors = true
c.ChannelBufferSize = 100
c.MetricsReporter = metrics.NoopReporter()
c.Logger = log.NewNoopLogger()
c.options = new(consumerOptions)
c.options.applyDefault()
}
<|start_filename|>backend/memory/partition_memory_bench_test.go<|end_filename|>
package memory
import (
"fmt"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"math/rand"
"sync"
"testing"
)
var pMemory = NewPartitionMemoryBackend(1000, log.Constructor.Log(), metrics.NoopReporter())
func BenchmarkPartitionMemory_Set(b *testing.B) {
pMemory := NewPartitionMemoryBackend(1000, log.Constructor.Log(), metrics.NoopReporter())
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := pMemory.Set([]byte(fmt.Sprint(rand.Intn(1000)+1)), []byte(`100`), 0); err != nil {
log.Fatal(err)
}
}
})
}
func BenchmarkPartitionMemory_Get(b *testing.B) {
pMemory := NewPartitionMemoryBackend(100, log.Constructor.Log(), metrics.NoopReporter())
for i := 1; i <= 10000; i++ {
if err := pMemory.Set([]byte(fmt.Sprint(i)), []byte(`100`), 0); err != nil {
log.Fatal(err)
}
}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := pMemory.Get([]byte(fmt.Sprint(rand.Intn(100) + 1))); err != nil {
log.Fatal(err)
}
}
})
}
func BenchmarkPartitionMemory_SetGet(b *testing.B) {
pMemory := NewPartitionMemoryBackend(100, log.Constructor.Log(), metrics.NoopReporter())
for i := 1; i <= 99999; i++ {
if err := pMemory.Set([]byte(fmt.Sprint(rand.Intn(1000)+1)), []byte(`100`), 0); err != nil {
log.Fatal(err)
}
}
go func() {
for {
if err := pMemory.Set([]byte(fmt.Sprint(rand.Intn(1000)+1)), []byte(`100`), 0); err != nil {
// b.Fatal must not be called from a spawned goroutine; report and stop the writer instead
b.Error(err)
return
}
}
}()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := pMemory.Get([]byte(fmt.Sprint(rand.Intn(1000) + 1))); err != nil {
b.Fatal(err)
}
}
})
}
func BenchmarkPartitionMemory_Iterator(b *testing.B) {
//var backend = NewPartitionMemoryBackend(1000)
//var backend = NewPartitionMemoryBackend(100)
for i := 1; i <= 99999; i++ {
if err := pMemory.Set([]byte(fmt.Sprint(rand.Intn(9999)+1)), []byte(`100`), 0); err != nil {
b.Fatal(err)
}
}
//iterators := pMemory.Partitions()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
/*it := pMemory.Iterator()
for it.Valid() {
if it.Error() != nil {
it.Next()
continue
}
_, err := encoders.DriverLocationSyncEncoderBuilder().Decode(it.Value())
if err != nil {
it.Next()
continue
}
it.Next()
}*/
iterators := pMemory.Partitions()
wg := new(sync.WaitGroup)
for _, i := range iterators {
wg.Add(1)
go func(it backend.Iterator, wg *sync.WaitGroup) {
defer wg.Done()
for it.Valid() {
if it.Error() != nil {
it.Next()
continue
}
it.Next()
}
}(i, wg)
}
wg.Wait()
}
})
}
<|start_filename|>kstream/store/hash_index.go<|end_filename|>
package store
import (
"errors"
"fmt"
"reflect"
"sync"
)
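// KeyMapper extracts the hash (index) key under which a record key is grouped.
// A minimal sketch (a hypothetical mapper indexing by the first CSV column of
// the value, mirroring the tests below):
//   mapper := func(key, val interface{}) (idx string) {
//       return strings.Split(val.(string), `,`)[0]
//   }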
type KeyMapper func(key, val interface{}) (idx string)
var UnknownIndex = errors.New(`index does not exist`)
type stringHashIndex struct {
indexes map[string]map[interface{}]bool // indexKey:recordKey:bool
mapper KeyMapper
mu *sync.Mutex
name string
}
func NewStringHashIndex(name string, mapper KeyMapper) Index {
return &stringHashIndex{
indexes: make(map[string]map[interface{}]bool),
mapper: mapper,
mu: new(sync.Mutex),
name: name,
}
}
func (s *stringHashIndex) String() string {
return s.name
}
func (s *stringHashIndex) Write(key, value interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
hashKey := s.mapper(key, value)
_, ok := s.indexes[hashKey]
if !ok {
s.indexes[hashKey] = make(map[interface{}]bool)
}
s.indexes[hashKey][key] = true
return nil
}
func (s *stringHashIndex) ValueIndexed(index, value interface{}) (bool, error) {
s.mu.Lock()
defer s.mu.Unlock()
hStr, ok := index.(string)
if !ok {
return false, fmt.Errorf(`unsupported hash type expected [string] given [%s]`, reflect.TypeOf(index))
}
_, ok = s.indexes[hStr]
if !ok {
return false, nil
}
_, ok = s.indexes[hStr][value]
return ok, nil
}
func (s *stringHashIndex) Hash(key, val interface{}) (hash interface{}) {
return s.mapper(key, val)
}
func (s *stringHashIndex) WriteHash(hash, key interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
hStr, ok := hash.(string)
if !ok {
return fmt.Errorf(`unsupported hash type expected [string] given [%s]`, reflect.TypeOf(hash))
}
_, ok = s.indexes[hStr]
if !ok {
s.indexes[hStr] = make(map[interface{}]bool)
}
s.indexes[hStr][key] = true
return nil
}
func (s *stringHashIndex) Delete(key, value interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
hashKey := s.mapper(key, value)
if _, ok := s.indexes[hashKey]; !ok {
return fmt.Errorf(`hashKey [%s] does not exist for [%s]`, hashKey, s.name)
}
delete(s.indexes[hashKey], key)
return nil
}
func (s *stringHashIndex) Keys() []interface{} {
s.mu.Lock()
defer s.mu.Unlock()
var keys []interface{}
for key := range s.indexes {
keys = append(keys, key)
}
return keys
}
func (s *stringHashIndex) Values() map[interface{}][]interface{} {
s.mu.Lock()
defer s.mu.Unlock()
values := make(map[interface{}][]interface{})
for idx, keys := range s.indexes {
for key := range keys {
values[idx] = append(values[idx], key)
}
}
return values
}
func (s *stringHashIndex) Read(key interface{}) ([]interface{}, error) {
s.mu.Lock()
defer s.mu.Unlock()
var indexes []interface{}
k, ok := key.(string)
if !ok {
return nil, fmt.Errorf(`unsupported hash type expected [string] given [%s]`, reflect.TypeOf(key))
}
index, ok := s.indexes[k]
if !ok {
return nil, UnknownIndex
}
for k := range index {
indexes = append(indexes, k)
}
return indexes, nil
}
<|start_filename|>kstream/processors/join/global_table_joiner_bench_test.go<|end_filename|>
package join
import (
"context"
"testing"
)
func BenchmarkGlobalTableJoiner(b *testing.B) {
ctx := context.Background()
if err := testStore.Set(ctx, 200, rightRecord{
PrimaryKey: 100,
ForeignKey: 200,
}, 0); err != nil {
b.Error(err)
}
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
j := makeJoiner(InnerJoin)
_, err := j.Join(ctx, 100, leftRecord{
PrimaryKey: 100,
ForeignKey: 200,
})
if err != nil {
b.Error(err)
}
}
})
}
<|start_filename|>examples/example_2/stream/init.go<|end_filename|>
package stream
import (
"github.com/google/uuid"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/examples/example_1/encoders"
"github.com/tryfix/kstream/kstream"
"github.com/tryfix/kstream/kstream/worker_pool"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"os"
"os/signal"
)
func Init() {
log.StdLogger = log.Constructor.Log(
log.WithLevel(`TRACE`),
log.WithColors(true),
)
Logger := log.NewLog(
log.WithLevel(`TRACE`),
log.WithColors(true),
).Log()
builderConfig := kstream.NewStreamBuilderConfig()
builderConfig.BootstrapServers = []string{`localhost:9092`}
builderConfig.ApplicationId = `k_stream_example_2`
builderConfig.ConsumerCount = 1
builderConfig.Host = `localhost:8100`
builderConfig.AsyncProcessing = true
//builderConfig.Store.StorageDir = `storage`
builderConfig.Store.Http.Host = `:9002`
builderConfig.ChangeLog.Enabled = false
//builderConfig.ChangeLog.Buffer.Enabled = true
//builderConfig.ChangeLog.Buffer.Size = 100
//builderConfig.ChangeLog.ReplicationFactor = 3
//builderConfig.ChangeLog.MinInSycReplicas = 2
builderConfig.WorkerPool.Order = worker_pool.OrderByKey
builderConfig.WorkerPool.NumOfWorkers = 100
builderConfig.WorkerPool.WorkerBufferSize = 10
builderConfig.MetricsReporter = metrics.PrometheusReporter(metrics.ReporterConf{`streams`, `k_stream_test`, nil})
builderConfig.Logger = Logger
//builderConfig.Producer.Pool.NumOfWorkers = 1
builder := kstream.NewStreamBuilder(builderConfig)
builder.StoreRegistry().New(
`account_detail_store`,
encoders.KeyEncoder,
encoders.AccountDetailsUpdatedEncoder)
builder.StoreRegistry().New(
`customer_profile_store`,
encoders.KeyEncoder,
encoders.CustomerProfileUpdatedEncoder)
err := builder.Build(InitStreams(builder)...)
if err != nil {
log.Fatal(log.WithPrefix(`boot.boot.Init`, `error in stream building`), err)
}
synced := make(chan bool, 1)
// trap SIGINT to trigger a shutdown.
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
stream := kstream.NewStreams(builder,
kstream.NotifyOnStart(synced),
kstream.WithConsumerOptions(consumer.WithRecordUuidExtractFunc(func(message *data.Record) uuid.UUID {
// extract uuid from header
id, err := uuid.Parse(string(message.Key))
if err != nil {
return uuid.New()
}
return id
})),
)
go func() {
<-signals
stream.Stop()
}()
if err := stream.Start(); err != nil {
log.Fatal(log.WithPrefix(`boot.boot.Init`, `error in stream starting`), err)
}
}
func InitStreams(builder *kstream.StreamBuilder) []kstream.Stream {
commonStream := initCommonStream(builder)
return []kstream.Stream{commonStream}
}
<|start_filename|>kstream/global_table_stream.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package kstream
import (
"context"
"fmt"
"github.com/Shopify/sarama"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/admin"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/kstream/offsets"
"github.com/tryfix/kstream/kstream/store"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"sync"
)
var offsetBackendName = `__k-table-offsets`
type StoreWriter func(r *data.Record, store store.Store) error
type RecordVersionExtractor func(ctx context.Context, key, value interface{}) (int64, error)
type RecordVersionComparator func(newVersion, currentVersion int64) bool
type tp struct {
topic string
partition int32
}
func (tp *tp) String() string {
return fmt.Sprintf(`%s_%d`, tp.topic, tp.partition)
}
// Global Table Stream is a special type of stream which runs in the background asynchronously and
// creates a partition consumer for each global table upstream topic+partition. Once the stream has started it
// syncs all the tables up to the broker's latest offset
type globalTableStream struct {
tables map[string]*tableInstance
restartOnFailure bool
restartOnFailureCount int
logger log.Logger
}
type GlobalTableStreamConfig struct {
ConsumerBuilder consumer.PartitionConsumerBuilder
BackendBuilder backend.Builder
OffsetManager offsets.Manager
KafkaAdmin admin.KafkaAdmin
Metrics metrics.Reporter
Logger log.Logger
}
// newGlobalTableStream creates a globalTableStream with one table instance (and partition consumer)
// per topic partition of the given global tables
func newGlobalTableStream(tables map[string]*globalKTable, config *GlobalTableStreamConfig) (*globalTableStream, error) {
offsetBackend, err := config.BackendBuilder(offsetBackendName)
if err != nil {
return nil, errors.WithPrevious(err, `offset backend build failed`)
}
stream := &globalTableStream{
tables: make(map[string]*tableInstance),
logger: config.Logger.NewLog(log.Prefixed(`global-tables`)),
}
var topics []string
for t := range tables {
topics = append(topics, t)
}
// get partition information for the topics
info, err := config.KafkaAdmin.FetchInfo(topics)
if err != nil {
return nil, errors.WithPrevious(err, `cannot fetch topic info`)
}
consumedLatency := config.Metrics.Observer(metrics.MetricConf{
Path: `k_stream_global_table_stream_consumed_latency_microseconds`,
Labels: []string{`topic`, `partition`},
})
for _, topic := range info {
if topic.Error != nil && topic.Error != sarama.ErrNoError {
return nil, errors.WithPrevious(topic.Error, fmt.Sprintf(`cannot get topic info for %s`, topic.Name))
}
for i := int32(len(topic.Partitions)) - 1; i >= 0; i-- {
partitionConsumer, err := config.ConsumerBuilder.Build(
consumer.BuilderWithId(fmt.Sprintf(`global_table_consumer_%s_%d`, topic.Name, i)),
consumer.BuilderWithLogger(config.Logger.NewLog(log.Prefixed(fmt.Sprintf(`global-table.%s-%d`, topic.Name, i)))),
)
if err != nil {
return nil, errors.WithPrevious(err, `cannot build partition consumer`)
}
t := new(tableInstance)
t.tp.topic = topic.Name
t.tp.partition = i
t.config = tables[t.tp.topic]
t.offsetBackend = offsetBackend
t.offsetKey = []byte(t.tp.String())
t.store = tables[t.tp.topic].store
t.storeWriter = tables[t.tp.topic].options.backendWriter
t.recordVersionExtractor = tables[t.tp.topic].options.recordVersionExtractor
t.recordVersionComparator = tables[t.tp.topic].options.recordVersionComparator
t.restartOnFailure = true
t.restartOnFailureCount = 1
t.consumer = partitionConsumer
t.offsets = config.OffsetManager
t.logger = config.Logger.NewLog(log.Prefixed(fmt.Sprintf(`global-table.%s-%d`, t.tp.topic, t.tp.partition)))
t.metrics.consumedLatency = consumedLatency
stream.tables[t.tp.String()] = t
}
}
return stream, nil
}
// StartStreams starts all the tables
func (s *globalTableStream) StartStreams(runWg *sync.WaitGroup) {
s.logger.Info(`sync started...`)
defer s.logger.Info(`syncing completed`)
// create a waitgroup with the num of tables for table syncing
syncWg := new(sync.WaitGroup)
syncWg.Add(len(s.tables))
go func() {
// the run waitgroup tracks each running table goroutine
for _, table := range s.tables {
runWg.Add(1)
go func(t *tableInstance, syncWg *sync.WaitGroup) {
t.Init()
syncWg.Done()
// once the table has stopped, mark the run waitgroup as done
<-t.stopped
runWg.Done()
}(table, syncWg)
}
}()
// method should be blocked until the syncing is done
syncWg.Wait()
s.printSyncInfo()
}
func (s *globalTableStream) printSyncInfo() {
for _, t := range s.tables {
t.print()
}
}
func (s *globalTableStream) stop() {
s.logger.Info(`streams closing...`)
defer s.logger.Info(`streams closed`)
wg := new(sync.WaitGroup)
wg.Add(len(s.tables))
for _, t := range s.tables {
go func(wg *sync.WaitGroup, t *tableInstance) {
defer wg.Done()
if err := t.consumer.Close(); err != nil {
t.logger.Error(err)
return
}
t.logger.Info(`stream closed`)
}(wg, t)
}
wg.Wait()
}
<|start_filename|>kstream/processors/processor.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package processors
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/topology"
)
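// ProcessFunc is the user-supplied handler applied to every record that
// reaches this processor node.
// A minimal sketch (hypothetical handler):
//   var fn ProcessFunc = func(ctx context.Context, key, value interface{}) error {
//       fmt.Printf("%v = %v\n", key, value)
//       return nil
//   }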
type ProcessFunc func(ctx context.Context, key, value interface{}) error
type Processor struct {
Id int32
ProcessFunc ProcessFunc
childBuilders []topology.NodeBuilder
childs []topology.Node
}
func (p *Processor) Childs() []topology.Node {
return p.childs
}
func (p *Processor) ChildBuilders() []topology.NodeBuilder {
return p.childBuilders
}
func (p *Processor) AddChildBuilder(builder topology.NodeBuilder) {
p.childBuilders = append(p.childBuilders, builder)
}
func (p *Processor) AddChild(node topology.Node) {
p.childs = append(p.childs, node)
}
func (p *Processor) Run(ctx context.Context, kIn, vIn interface{}) (interface{}, interface{}, bool, error) {
err := p.ProcessFunc(ctx, kIn, vIn)
if err != nil {
return kIn, vIn, false, errors.WithPrevious(err, `process error`)
}
for _, child := range p.childs {
_, _, next, err := child.Run(ctx, kIn, vIn)
if err != nil || !next {
return nil, nil, false, err
}
}
return kIn, vIn, true, nil
}
func (p *Processor) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range p.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &Processor{
ProcessFunc: p.ProcessFunc,
childs: childs,
Id: p.Id,
}, nil
}
func (p *Processor) Next() bool {
return true
}
func (p *Processor) Name() string {
return `processor`
}
func (p *Processor) Type() topology.Type {
return topology.Type(`processor`)
}
func (p *Processor) ID() int32 {
return p.Id
}
<|start_filename|>kstream/store/hash_index_test.go<|end_filename|>
package store
import (
"github.com/google/uuid"
"reflect"
"strings"
"testing"
)
func TestNewIndex(t *testing.T) {
var mapper func(key, val interface{}) (idx string)
index := NewStringHashIndex(`foo`, mapper)
type args struct {
name string
mapper KeyMapper
}
tests := []struct {
name string
args args
want Index
}{
{name: `new`, args: struct {
name string
mapper KeyMapper
}{name: `foo`, mapper: mapper}, want: index},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := NewStringHashIndex(tt.args.name, tt.args.mapper); !reflect.DeepEqual(got, tt.want) {
t.Errorf("NewStringHashIndex() = %#v, want %#v", got, tt.want)
}
})
}
}
func TestHashIndex_Delete(t *testing.T) {
index := NewStringHashIndex(`foo9`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
if err := index.Write(`100`, `111,222`); err != nil {
t.Error(err)
}
if err := index.Delete(`100`, `111,222`); err != nil {
t.Error(err)
}
data, err := index.Read(`111`)
if err != nil {
t.Error(err)
}
if len(data) > 0 {
t.Fail()
}
}
func TestHashIndex_Name(t *testing.T) {
tests := []struct {
name string
idx Index
want string
}{
{
name: `name`,
idx: NewStringHashIndex(`foo`, nil),
want: `foo`},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := tt.idx.String(); got != tt.want {
t.Errorf("Name() = %v, want %v", got, tt.want)
}
})
}
}
func TestHashIndex_Read(t *testing.T) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
if err := index.Write(`100`, `111,222`); err != nil {
t.Error(err)
}
data, err := index.Read(`111`)
if err != nil {
t.Error(err)
}
if !reflect.DeepEqual(data, []interface{}{`100`}) {
t.Errorf("expect []interface{}{`100`} have %#v", data)
}
}
func TestHashIndex_Write(t *testing.T) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
if err := index.Write(`100`, `111,222`); err != nil {
t.Error(err)
}
data, err := index.Read(`111`)
if err != nil {
t.Error(err)
}
if !reflect.DeepEqual(data, []interface{}{`100`}) {
t.Errorf("expect []interface{}{`100`} have %#v", data)
}
}
func TestHashIndex_WriteUuidKey(t *testing.T) {
index := NewStringHashIndex(`foo`, func(key, val interface{}) (idx string) {
return strings.Split(val.(string), `,`)[0]
})
uid := uuid.New()
if err := index.Write(uid, `111,222`); err != nil {
t.Error(err)
}
data, err := index.Read(`111`)
if err != nil {
t.Error(err)
}
if !reflect.DeepEqual(data, []interface{}{uid}) {
t.Errorf("expect []interface{}{`100`} have %#v", data)
}
}
<|start_filename|>admin/mock_topics_test.go<|end_filename|>
package admin
import (
"fmt"
"github.com/tryfix/kstream/data"
"testing"
)
func TestMockPartition_Latest(t *testing.T) {
mocksTopics := NewMockTopics()
kafkaAdmin := &MockKafkaAdmin{
Topics: mocksTopics,
}
if err := kafkaAdmin.CreateTopics(map[string]*Topic{
`tp1`: {
Name: "tp1",
NumPartitions: 1,
ReplicationFactor: 1,
},
}); err != nil {
t.Error(err)
}
tp, _ := mocksTopics.Topic(`tp1`)
pt, _ := tp.Partition(0)
for i := 1; i <= 3333; i++ {
err := pt.Append(&data.Record{
Key: []byte(fmt.Sprint(i)),
Value: []byte(`v`),
Topic: "tp1",
})
if err != nil {
t.Error(err)
}
}
if pt.Latest() != 3332 {
t.Fail()
}
}
<|start_filename|>kstream/k_source.go<|end_filename|>
package kstream
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/encoding"
"github.com/tryfix/kstream/kstream/topology"
)
type kSource struct {
Id int32
keyEncoder encoding.Encoder
valEncoder encoding.Encoder
name string
topic string
}
type kSourceBuilder struct {
keyEncoderBuilder encoding.Builder
valEncoderBuilder encoding.Builder
name string
topic string
info map[string]string
}
func (b *kSourceBuilder) Build() (topology.Source, error) {
return &kSource{
name: b.name,
topic: b.topic,
keyEncoder: b.keyEncoderBuilder(),
valEncoder: b.valEncoderBuilder(),
}, nil
}
func (b *kSourceBuilder) Name() string {
return b.name
}
func (b *kSourceBuilder) SourceType() string {
return `kafka`
}
func (b *kSourceBuilder) Info() map[string]string {
return b.info
}
func (s *kSource) Name() string {
return s.name
}
func (s *kSource) Run(ctx context.Context, kIn, vIn []byte) (kOut, vOut interface{}, err error) {
return s.decodeRecord(kIn, vIn)
}
func (s *kSource) decodeRecord(key []byte, val []byte) (interface{}, interface{}, error) {
k, err := s.keyEncoder.Decode(key)
if err != nil {
return nil, nil, errors.WithPrevious(err, `key decode error`)
}
v, err := s.valEncoder.Decode(val)
if err != nil {
return nil, nil, errors.WithPrevious(err, `value decode error`)
}
return k, v, nil
}
func (s *kSource) Close() {}
func (s *kSource) Next() bool {
return true
}
func (s *kSource) Type() topology.Type {
return topology.TypeSource
}
<|start_filename|>kstream/k_flow.go<|end_filename|>
package kstream
//import (
// "github.com/tryfix/kstream/errors"
// "github.com/tryfix/kstream/flow"
// "github.com/tryfix/kstream/logger"
// "github.com/tryfix/kstream/processors"
// "github.com/tryfix/kstream/source_sink"
// "time"
//)
//
//type KFlow struct {
// source *kSource
// sink *kSink
// branches []*flow.Branch
// processors []processors.Processor
// retryCount int
// retryInterval time.Duration
// changelogEnabled bool
// errorHandler errors.ErrorHandler
//}
//
//type KFlowBranchBuilder struct {
// name string
// Builder *kFlowBuilder
// Predicate flow.BranchPredicate
// isParallel bool
//}
//
//type kFlowBuilder struct {
// sourceBuilder *kSourceBuilder
// sinkBuilder *kSinkBuilder
// branches []*KFlowBranchBuilder
// processors []processors.Processor
// retryCount int
// retryInterval time.Duration
// changelogEnabled bool
// errorHandler errors.ErrorHandler
//}
//
//func (b *kFlowBuilder) Build() (flow.Flow, error) {
//
// kFlow := &KFlow{
// changelogEnabled: b.changelogEnabled,
// errorHandler: b.errorHandler,
// processors: b.processors,
// retryCount: b.retryCount,
// retryInterval: b.retryInterval,
// }
//
// if b.sourceBuilder != nil {
// source, err := b.sourceBuilder.Build()
// if err != nil {
// return nil, err
// }
//
// kSource, ok := source.(*kSource)
// if !ok {
// logger.DefaultLogger.Fatal(`k-stream.kFlow`, `must be the type of kSource`)
// }
//
// kFlow.source = kSource
// }
//
// for _, branch := range b.branches {
//
// // Build branch
// flowBranch, err := branch.Builder.Build()
// if err != nil {
// logger.DefaultLogger.Fatal(`k-stream.kFlow`, err)
// }
//
// kFlow.branches = append(kFlow.branches, &flow.Branch{
// Predicate: branch.Predicate,
// Flow: flowBranch,
// })
// }
//
// if b.sinkBuilder != nil {
// sink, err := b.sinkBuilder.Build()
// if err != nil {
// return nil, err
// }
//
// kSink, ok := sink.(*kSink)
// if !ok {
// logger.DefaultLogger.Fatal(`k-stream.kFlow`, `must be the type of kSource`)
// }
//
// kFlow.sink = kSink
// }
//
// return kFlow, nil
//}
//
//func (f *KFlow) Source() source_sink.Source {
// return f.source
//}
//
//func (f *KFlow) Sink() source_sink.Sink {
// return f.sink
//}
//
//func (f *KFlow) Processors() []processors.Processor {
// return f.processors
//}
//
//func (f *KFlow) Branches() []*flow.Branch {
// return f.branches
//}
//
//func (f *KFlow) Sinkable() bool {
// return f.sink != nil
//}
//
//func (f *KFlow) OnError() errors.ErrorHandler {
// return f.errorHandler
//}
//
//func (f *KFlow) RetryCount() int {
// return f.retryCount
//}
//
//func (f *KFlow) RetryInterval() time.Duration {
// return f.retryInterval
//}
<|start_filename|>consumer/mock_consumer_partition.go<|end_filename|>
package consumer
import (
"github.com/tryfix/kstream/data"
)
type mockConsumerPartition struct {
tp TopicPartition
records chan *data.Record
}
func (m *mockConsumerPartition) Wait() chan<- bool {
return nil
}
func (m *mockConsumerPartition) Records() <-chan *data.Record {
return m.records
}
func (m *mockConsumerPartition) Partition() TopicPartition {
return m.tp
}
func (m *mockConsumerPartition) MarkOffset(offset int64) {}
func (m *mockConsumerPartition) CommitOffset(*data.Record) error {
return nil
}
<|start_filename|>kstream/processors/join/joiner.go<|end_filename|>
package join
import (
"context"
"github.com/tryfix/kstream/kstream/topology"
)
type Type int
const (
LeftJoin Type = iota
InnerJoin
)
type Joiner interface {
topology.Node
Join(ctx context.Context, key, val interface{}) (joinedVal interface{}, err error)
}
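// KeyMapper derives the key used to look up the right-side record for an
// incoming left-side record; ValueMapper merges the left and right records
// into the joined value.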
type KeyMapper func(key, value interface{}) (mappedKey interface{}, err error)
type ValueMapper func(left, right interface{}) (joined interface{}, err error)
<|start_filename|>producer/config.go<|end_filename|>
package producer
import (
"github.com/Shopify/sarama"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
)
type Config struct {
Id string
*sarama.Config
Pool struct {
NumOfWorkers int
}
BootstrapServers []string
RequiredAcks RequiredAcks
Partitioner Partitioner
Logger log.Logger
MetricsReporter metrics.Reporter
}
func NewConfig() *Config {
c := new(Config)
c.setDefaults()
return c
}
func (c *Config) validate() error {
if err := c.Config.Validate(); err != nil {
return err
}
return nil
}
func (c *Config) setDefaults() {
c.Config = sarama.NewConfig()
c.Producer.RequiredAcks = sarama.RequiredAcks(WaitForAll)
c.Producer.Return.Errors = true
c.Producer.Return.Successes = true
c.Logger = log.NewNoopLogger()
//c.Config.Version = sarama.V2_3_0_0
c.MetricsReporter = metrics.NoopReporter()
c.Producer.Compression = sarama.CompressionSnappy
if c.Partitioner == Manual {
c.Producer.Partitioner = sarama.NewManualPartitioner
}
if c.Partitioner == HashBased {
c.Producer.Partitioner = sarama.NewHashPartitioner
}
if c.Partitioner == Random {
c.Producer.Partitioner = sarama.NewRandomPartitioner
}
}
<|start_filename|>kstream/store/uuid_hash_index_test.go<|end_filename|>
package store
import (
"fmt"
"github.com/google/uuid"
"reflect"
"strings"
"testing"
)
func TestNewUuidHashIndex(t *testing.T) {
var mapper func(key, val interface{}) (idx uuid.UUID)
index := NewUuidHashIndex(`foo`, mapper)
type args struct {
name string
mapper func(key, val interface{}) (idx uuid.UUID)
}
tests := []struct {
name string
args args
want Index
}{
{name: `new`, args: struct {
name string
mapper func(key, val interface{}) (idx uuid.UUID)
}{name: `foo`, mapper: mapper}, want: index},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := NewUuidHashIndex(tt.args.name, tt.args.mapper); !reflect.DeepEqual(got, tt.want) {
t.Errorf("NewStringHashIndex() = %#v, want %#v", got, tt.want)
}
})
}
}
func TestUUIDHashIndex_Delete(t *testing.T) {
uid1 := uuid.New()
uid2 := uuid.New()
index := NewUuidHashIndex(`foo`, func(key, val interface{}) (idx uuid.UUID) {
uid, _ := uuid.Parse(strings.Split(val.(string), `,`)[0])
return uid
})
if err := index.Write(uid1, fmt.Sprintf(`%s,%s`, uid1.String(), uid2.String())); err != nil {
t.Error(err)
}
if err := index.Delete(uid1, fmt.Sprintf(`%s,%s`, uid1.String(), uid2.String())); err != nil {
t.Error(err)
}
data, err := index.Read(uid1)
if err != nil {
t.Error(err)
}
if len(data) > 0 {
t.Fail()
}
}
func TestUUIDHashIndex_Name(t *testing.T) {
tests := []struct {
name string
idx Index
want string
}{
{
name: `name`,
idx: NewUuidHashIndex(`foo`, nil),
want: `foo`},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := tt.idx.String(); got != tt.want {
t.Errorf("Name() = %v, want %v", got, tt.want)
}
})
}
}
func TestUUIDHashIndex_Read(t *testing.T) {
uid1 := uuid.New()
uid2 := uuid.New()
index := NewUuidHashIndex(`foo`, func(key, val interface{}) (idx uuid.UUID) {
uid, _ := uuid.Parse(strings.Split(val.(string), `,`)[0])
return uid
})
if err := index.Write(uid1, fmt.Sprintf(`%s,%s`, uid1.String(), uid2.String())); err != nil {
t.Error(err)
}
data, err := index.Read(uid1)
if err != nil {
t.Error(err)
}
if !reflect.DeepEqual(data, []interface{}{uid1}) {
t.Errorf("expect []interface{}{`100`} have %#v", data)
}
}
func TestUUIDHashIndex_Write(t *testing.T) {
uid1 := uuid.New()
uid2 := uuid.New()
index := NewUuidHashIndex(`foo`, func(key, val interface{}) (idx uuid.UUID) {
uid, _ := uuid.Parse(strings.Split(val.(string), `,`)[0])
return uid
})
if err := index.Write(uid1, fmt.Sprintf(`%s,%s`, uid1.String(), uid2.String())); err != nil {
t.Error(err)
}
data, err := index.Read(uid1)
if err != nil {
t.Error(err)
}
if !reflect.DeepEqual(data, []interface{}{uid1}) {
t.Errorf("expect []interface{}{`100`} have %#v", data)
}
}
<|start_filename|>kstream/branch/branch.go<|end_filename|>
package branch
import (
"context"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/topology"
)
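// Predicate reports whether a record belongs to a branch; the Splitter routes
// each record to the first branch whose Predicate returns true.
// A minimal sketch (hypothetical predicate):
//   var evenKeys branch.Predicate = func(ctx context.Context, key, val interface{}) (bool, error) {
//       k, ok := key.(int)
//       return ok && k%2 == 0, nil
//   }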
type Predicate func(ctx context.Context, key interface{}, val interface{}) (bool, error)
type Details struct {
Name string
Predicate Predicate
}
type Splitter struct {
Id int32
Branches []topology.Node
BranchBuilders []topology.NodeBuilder
}
func (bs *Splitter) ChildBuilders() []topology.NodeBuilder {
return bs.BranchBuilders
}
func (bs *Splitter) Childs() []topology.Node {
return bs.Branches
}
func (bs *Splitter) AddChildBuilder(builder topology.NodeBuilder) {
bs.BranchBuilders = append(bs.BranchBuilders, builder)
}
func (bs *Splitter) AddChild(node topology.Node) {
bs.Branches = append(bs.Branches, node)
}
func (bs *Splitter) Build() (topology.Node, error) {
var branches []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range bs.BranchBuilders {
branch, err := childBuilder.Build()
if err != nil {
return nil, err
}
branches = append(branches, branch)
}
return &Splitter{
Branches: branches,
Id: bs.Id,
}, nil
}
func (bs *Splitter) Next() bool {
return true
}
func (bs *Splitter) ID() int32 {
return bs.Id
}
func (bs *Splitter) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error) {
for _, b := range bs.Branches {
branch, _ := b.(*Branch)
ok, err := branch.Predicate(ctx, kIn, vIn)
if err != nil {
return nil, nil, false, errors.WithPrevious(err, `predicate error`)
}
if ok {
_, _, next, err := branch.Run(ctx, kIn, vIn)
if err != nil || !next {
return nil, nil, false, err
}
break
}
}
return kIn, vIn, true, nil
}
func (bs *Splitter) Type() topology.Type {
return topology.Type(`branch_splitter`)
}
type Branch struct {
Id int32
Name string
Predicate Predicate
childBuilders []topology.NodeBuilder
childs []topology.Node
}
func (b *Branch) Childs() []topology.Node {
return b.childs
}
func (b *Branch) ChildBuilders() []topology.NodeBuilder {
return b.childBuilders
}
func (b *Branch) AddChildBuilder(builder topology.NodeBuilder) {
b.childBuilders = append(b.childBuilders, builder)
}
func (b *Branch) AddChild(node topology.Node) {
b.childs = append(b.childs, node)
}
func (b *Branch) Build() (topology.Node, error) {
var childs []topology.Node
//var childBuilders []node.NodeBuilder
for _, childBuilder := range b.childBuilders {
child, err := childBuilder.Build()
if err != nil {
return nil, err
}
childs = append(childs, child)
}
return &Branch{
Name: b.Name,
Predicate: b.Predicate,
childs: childs,
Id: b.Id,
}, nil
}
func (b *Branch) Next() bool {
return true
}
func (b *Branch) ID() int32 {
return b.Id
}
func (b *Branch) Run(ctx context.Context, kIn, vIn interface{}) (kOut, vOut interface{}, cont bool, err error) {
for _, child := range b.childs {
_, _, next, err := child.Run(ctx, kIn, vIn)
if err != nil || !next {
return nil, nil, false, err
}
}
return kIn, vIn, true, nil
}
func (b *Branch) Type() topology.Type {
return topology.TypeBranch
}
<|start_filename|>kstream/changelog/replica_syncer.go<|end_filename|>
package changelog
import (
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"github.com/tryfix/log"
)
type replicaSyncer struct {
cache *cache
tp consumer.TopicPartition
consumer consumer.PartitionConsumer
syncing bool
logger log.Logger
running bool
}
func (rs *replicaSyncer) Sync(startOffset int64) (started chan bool, syncErrors chan error) {
started = make(chan bool)
syncErrors = make(chan error)
go rs.initSync(startOffset, started, syncErrors)
return started, syncErrors
}
func (rs *replicaSyncer) initSync(startOffset int64, started chan bool, syncErrors chan error) {
if startOffset == 0 {
startOffset = int64(consumer.Earliest)
}
events, err := rs.consumer.Consume(rs.tp.Topic, rs.tp.Partition, consumer.Offset(startOffset))
if err != nil {
syncErrors <- errors.WithPrevious(err, fmt.Sprintf(`cannot read partition %s[%d]`,
rs.tp.Topic, rs.tp.Partition))
return
}
rs.logger.Info(fmt.Sprintf(`partition consumer started at offset [%d]`, startOffset))
started <- true
rs.syncing = true
for event := range events {
switch ev := event.(type) {
case *data.Record:
if err := rs.cache.Put(ev); err != nil {
syncErrors <- errors.WithPrevious(err, `writing to cache failed`)
}
case *consumer.PartitionEnd:
rs.logger.Info(fmt.Sprintf(`replica sync completed for [%s]`, rs.tp))
case *consumer.Error:
rs.logger.Error(ev)
}
}
close(started)
close(syncErrors)
rs.syncing = false
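// the consume loop has exited; flag it so that Stop() skips closing the consumer again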
rs.running = true
}
func (rs *replicaSyncer) Stop() error {
if rs.running {
rs.logger.Info(`sync not running`)
return nil
}
return rs.consumer.Close()
}
<|start_filename|>kstream/changelog/cache.go<|end_filename|>
package changelog
import (
"encoding/binary"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/backend"
"github.com/tryfix/kstream/consumer"
"github.com/tryfix/kstream/data"
"sync"
)
type cacheManager struct {
caches map[string]*cache
mu *sync.Mutex
backendBuilder backend.Builder
cacheOffsetStorage backend.Backend
}
func newCacheManager(backendBuilder backend.Builder) (*cacheManager, error) {
m := &cacheManager{
caches: make(map[string]*cache),
mu: new(sync.Mutex),
backendBuilder: backendBuilder,
}
offsetBackend, err := backendBuilder(`__changelog_cache_offsets`)
if err != nil {
return nil, err
}
//if !offsetBackend.Persistent() {
// return nil, errors.New( `only persistent backend are supported`)
//}
m.cacheOffsetStorage = offsetBackend
return m, nil
}
func (m *cacheManager) getCache(tp consumer.TopicPartition) (*cache, error) {
m.mu.Lock()
defer m.mu.Unlock()
if c, ok := m.caches[tp.String()]; ok {
return c, nil
}
b, err := m.backendBuilder(`__changelog_cache_` + tp.String())
if err != nil {
return nil, err
}
cache := new(cache)
cache.tp = tp
cache.backend = b
cache.offsetBackend = m.cacheOffsetStorage
m.caches[tp.String()] = cache
return cache, nil
}
type cache struct {
backend backend.Backend
offsetBackend backend.Backend
tp consumer.TopicPartition
}
func (c *cache) Flush() error {
itr := c.backend.Iterator()
itr.SeekToFirst()
for itr.Valid() {
if err := c.backend.Delete(itr.Key()); err != nil {
return errors.WithPrevious(err, `cache flush failed`)
}
itr.Next()
}
return nil
}
func (c *cache) Put(record *data.Record) error {
if len(record.Value) < 1 {
if err := c.backend.Delete(record.Key); err != nil {
return err
}
} else {
if err := c.backend.Set(record.Key, record.Value, 0); err != nil {
return err
}
}
// update current offset on backend
return c.offsetBackend.Set([]byte(c.offsetKeyPrefix()), c.encodeOffset(record.Offset), 0)
}
func (c *cache) offsetKeyPrefix() string {
return `__changelog_offset_cache_last_synced_` + c.tp.String()
}
func (c *cache) ReadAll() []*data.Record {
var records []*data.Record
i := c.backend.Iterator()
i.SeekToFirst()
for i.Valid() {
record := &data.Record{
Key: i.Key(),
Value: i.Value(),
Topic: c.tp.Topic,
Partition: c.tp.Partition,
}
records = append(records, record)
i.Next()
}
return records
}
func (c *cache) Delete(record *data.Record) error {
return c.backend.Delete(record.Key)
}
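// offsets are persisted as 8-byte little-endian unsigned integers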
func (c *cache) decodeOffset(offset []byte) int64 {
return int64(binary.LittleEndian.Uint64(offset))
}
func (c *cache) encodeOffset(offset int64) []byte {
byt := make([]byte, 8)
binary.LittleEndian.PutUint64(byt, uint64(offset))
return byt
}
func (c *cache) LastSynced() (int64, error) {
byt, err := c.offsetBackend.Get([]byte(c.offsetKeyPrefix()))
if err != nil {
return 0, err
}
if len(byt) < 1 {
return 0, nil
}
return c.decodeOffset(byt), nil
}
<|start_filename|>admin/mock_kafka_admin.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package admin
type MockKafkaAdmin struct {
Topics *Topics
}
func NewMockAdminWithTopics(tps map[string]*Topic) *MockKafkaAdmin {
topics := NewMockTopics()
admin := &MockKafkaAdmin{Topics: topics}
admin.CreateTopics(tps)
return admin
}
func (m *MockKafkaAdmin) FetchInfo(topics []string) (map[string]*Topic, error) {
tps := make(map[string]*Topic)
for _, topic := range topics {
info, err := m.Topics.Topic(topic)
if err != nil {
return nil, err
}
tps[topic] = info.Meta
}
return tps, nil
}
func (m *MockKafkaAdmin) CreateTopics(topics map[string]*Topic) error {
for name, topic := range topics {
if err := m.createTopic(name, topic); err != nil {
return err
}
}
return nil
}
func (m *MockKafkaAdmin) createTopic(name string, info *Topic) error {
topic := &MockTopic{
Name: name,
Meta: info,
}
err := m.Topics.AddTopic(topic)
if err != nil {
return err
}
return nil
}
func (m *MockKafkaAdmin) DeleteTopics(topics []string) (map[string]error, error) {
for _, tp := range topics {
if err := m.Topics.RemoveTopic(tp); err != nil {
return nil, err
}
}
return nil, nil
}
func (m *MockKafkaAdmin) Close() {}
<|start_filename|>backend/backend.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package backend
import (
"time"
)
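// Builder constructs a named Backend instance; stores, changelog caches and
// offset storages each request their own backend by name.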
type Builder func(name string) (Backend, error)
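// Backend is the low-level key-value storage abstraction. Note that a zero
// expiry passed to Set is used throughout this codebase to mean "no expiry".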
type Backend interface {
Name() string
Set(key []byte, value []byte, expiry time.Duration) error
Get(key []byte) ([]byte, error)
RangeIterator(fromKy []byte, toKey []byte) Iterator
Iterator() Iterator
Delete(key []byte) error
SetExpiry(time time.Duration)
String() string
Persistent() bool
Close() error
Destroy() error
}
<|start_filename|>examples/example_1/encoders/transaction_encoder.go<|end_filename|>
package encoders
import (
"encoding/json"
"fmt"
"github.com/tryfix/errors"
"github.com/tryfix/kstream/examples/example_1/events"
)
type TransactionEncoder struct {
ID string `json:"id"`
Type string `json:"type"`
Body interface{} `json:"body"`
Timestamp int64 `json:"timestamp"`
}
func (t TransactionEncoder) Encode(data interface{}) ([]byte, error) {
panic("implement me")
}
func (t TransactionEncoder) Decode(data []byte) (interface{}, error) {
te := TransactionEncoder{}
err := json.Unmarshal(data, &te)
if err != nil {
return nil, err
}
switch te.Type {
case `account_credited`:
ac := events.AccountCredited{}
err := json.Unmarshal(data, &ac)
if err != nil {
return nil, err
}
return ac, nil
case `account_debited`:
ad := events.AccountDebited{}
err := json.Unmarshal(data, &ad)
if err != nil {
return nil, err
}
return ad, nil
default:
return nil, errors.New(fmt.Sprintf(`unexpected type received :- %v`, te.Type))
}
}
<|start_filename|>kstream/worker_pool/pool_bench_test.go<|end_filename|>
package worker_pool
import (
"context"
"fmt"
"math/rand"
"testing"
)
var pOrdered *Pool
var pRandom *Pool
func init() {
//builder := node.NewMockTopologyBuilder(10, 10)
//
//pl := NewPool(`1`, builder, metrics.NoopReporter(), &PoolConfig{
// NumOfWorkers: 1000,
// Order: OrderByKey,
// WorkerBufferSize: 10000,
//})
//pOrdered = pl
//
//plRd := NewPool(`2`, builder, metrics.NoopReporter(), &PoolConfig{
// NumOfWorkers: 1000,
// Order: OrderRandom,
// WorkerBufferSize: 10000,
//})
//pRandom = plRd
}
func BenchmarkPool_Run_Random(b *testing.B) {
b.RunParallel(func(pb *testing.PB) {
k := rand.Intn(8) + 1
for pb.Next() {
pRandom.Run(context.Background(), []byte(`200`), []byte(fmt.Sprintf(`br_%d`, k)), func() {})
}
})
}
func BenchmarkPool_Run_Ordered(b *testing.B) {
b.RunParallel(func(pb *testing.PB) {
k := rand.Intn(8) + 1
for pb.Next() {
pOrdered.Run(context.Background(), []byte(`200`), []byte(fmt.Sprintf(`br_%d`, k)), func() {})
}
})
}
//func BenchmarkPool_Run_Ordered(b *testing.B) {
//
// var count int
//
// f := new(flow.MockFlow)
// var processor processors.ProcessFunc = func(ctx context.Context, key interface{}, value interface{}) error {
//
// v, ok := key.(int)
// if !ok {
// b.Error(fmt.Sprintf(`expected [int] have [%+v]`, reflect.TypeOf(key)))
// }
//
// count = v
//
// return nil
// }
//
// f.ProcessorsM = append(f.ProcessorsM, processor)
// executor := flow.NewPlowExecutor(f, logger.DefaultLogger)
//
// p := NewPool(executor, &encoding.IntEncoder{}, &encoding.IntEncoder{}, &PoolConfig{
// NumOfWorkers: 20,
// Order: OrderByKey,
// WorkerBufferSize: 10,
// })
//
// b.RunParallel(func(pb *testing.PB) {
// for pb.Next() {
// done := p.Run(context.Background(), []byte(string(`100`)), []byte(string(`100`)))
// <-done
// }
// })
//
// if count != 100 {
// b.Fail()
// }
//
//}
<|start_filename|>kstream/offsets/manager.go<|end_filename|>
package offsets
import (
"fmt"
"github.com/Shopify/sarama"
"github.com/tryfix/log"
)
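// Manager exposes Kafka offset lookups, used to check whether a locally stored
// offset still falls within the broker's retained range for a topic partition.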
type Manager interface {
OffsetValid(topic string, partition int32, offset int64) (isValid bool, err error)
GetOffsetLatest(topic string, partition int32) (offset int64, err error)
GetOffsetOldest(topic string, partition int32) (offset int64, err error)
Close() error
}
type Config struct {
Config *sarama.Config
BootstrapServers []string
Logger log.Logger
}
type manager struct {
client sarama.Client
}
func NewManager(config *Config) Manager {
logger := config.Logger.NewLog(log.Prefixed(`offset-manager`))
client, err := sarama.NewClient(config.BootstrapServers, config.Config)
if err != nil {
logger.Fatal(fmt.Sprintf(`cannot initiate builder due to [%+v]`, err))
}
return &manager{client: client}
}
func (m *manager) OffsetValid(topic string, partition int32, offset int64) (isValid bool, err error) {
isValid, _, err = m.validate(topic, partition, offset)
return
}
func (m *manager) GetOffsetLatest(topic string, partition int32) (offset int64, err error) {
partitionStart, err := m.client.GetOffset(topic, partition, sarama.OffsetNewest)
if err != nil {
return offset, fmt.Errorf(`cannot get latest offset for %s-%d due to %w`, topic, partition, err)
}
return partitionStart, nil
}
func (m *manager) GetOffsetOldest(topic string, partition int32) (offset int64, err error) {
partitionStart, err := m.client.GetOffset(topic, partition, sarama.OffsetOldest)
if err != nil {
return offset, fmt.Errorf(`cannot get oldest offset for %s-%d due to %w`, topic, partition, err)
}
return partitionStart, nil
}
func (m *manager) Close() error {
return m.client.Close()
}
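// offsetValid reports whether offset lies within the broker's retained range
// [bkStart, bkEnd): at or after the oldest offset and strictly before the next
// offset to be produced.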
func offsetValid(offset, bkStart, bkEnd int64) bool {
return offset >= bkStart && offset < bkEnd
}
func (m *manager) validate(topic string, partition int32, offset int64) (isValid bool, valid int64, err error) {
startOffset, err := m.GetOffsetOldest(topic, partition)
if err != nil {
return false, 0, fmt.Errorf(`offset validate failed for %s-%d due to %w`, topic, partition, err)
}
endOffset, err := m.client.GetOffset(topic, partition, sarama.OffsetNewest)
if err != nil {
return false, 0, fmt.Errorf(`offset validate failed for %s-%d due to %w`, topic, partition, err)
}
return offsetValid(offset, startOffset, endOffset), startOffset, nil
}
<|start_filename|>kstream/changelog/buffer.go<|end_filename|>
/**
* Copyright 2020 TryFix Engineering.
* All rights reserved.
* Authors:
* <NAME> (<EMAIL>)
*/
package changelog
import (
"context"
"github.com/tryfix/kstream/data"
"github.com/tryfix/kstream/producer"
"github.com/tryfix/log"
"github.com/tryfix/metrics"
"sync"
"time"
)
// Buffer holds a temporary changelog Buffer
type Buffer struct {
records []*data.Record
mu *sync.Mutex
shouldFlush chan bool
flushInterval time.Duration
bufferSize int
logger log.Logger
producer producer.Producer
lastFlushed time.Time
metrics struct {
flushLatency metrics.Observer
}
}
// NewBuffer creates a new Buffer object
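// The Buffer flushes once bufferSize records have accumulated; the
// interval-based flusher is currently a no-op (see runFlusher below).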
func NewBuffer(p producer.Producer, size int, flushInterval time.Duration, logger log.Logger) *Buffer {
flush := 1 * time.Second
if flushInterval != 0 {
flush = flushInterval
}
b := &Buffer{
records: make([]*data.Record, 0, size),
mu: new(sync.Mutex),
producer: p,
bufferSize: size,
logger: logger,
shouldFlush: make(chan bool, 1),
flushInterval: flush,
lastFlushed: time.Now(),
}
go b.runFlusher()
return b
}
// Clear clears the Buffer
func (b *Buffer) Clear() {
b.mu.Lock()
defer b.mu.Unlock()
if err := b.flushAll(); err != nil {
b.logger.ErrorContext(context.Background(), `k-stream.changelog.buffer`, err)
}
}
func (b *Buffer) Records() []*data.Record {
b.mu.Lock()
defer b.mu.Unlock()
return b.records
}
// Store stores the record in Buffer
func (b *Buffer) Store(record *data.Record) {
b.mu.Lock()
defer b.mu.Unlock()
b.records = append(b.records, record)
if len(b.records) >= b.bufferSize {
b.flush()
}
}
func (b *Buffer) runFlusher() {
//tic := time.NewTicker(b.flushInterval)
//defer tic.Stop()
//
//for range tic.C {
//
// if time.Since(b.lastFlushed) <= b.flushInterval {
// continue
// }
//
// b.mu.Lock()
// if len(b.records) > 0 {
// b.flush()
// }
// b.mu.Unlock()
//
//}
}
func (b *Buffer) flush() {
if err := b.flushAll(); err != nil {
b.logger.ErrorContext(context.Background(), `k-stream.changelog.buffer`, err)
}
b.logger.Trace(`k-stream.changelog.buffer`, `buffer flushed`)
}
func (b *Buffer) flushAll() error {
begin := time.Now()
defer func(t time.Time) {
b.metrics.flushLatency.Observe(float64(time.Since(begin).Nanoseconds()/1e3), nil)
}(begin)
// publish buffer to kafka and clear on success
//deDuplicated := deDuplicate(b.records)
//if len(deDuplicated) > 0 {
// if err := b.producer.ProduceBatch(context.Background(), deDuplicated); err != nil {
// return err
// }
//}
if err := b.producer.ProduceBatch(context.Background(), b.records); err != nil {
return err
}
b.reset()
return nil
}
func (b *Buffer) Delete(record *data.Record) {
record.Value = nil
b.Store(record)
}
func (b *Buffer) reset() {
b.records = make([]*data.Record, 0, b.bufferSize)
b.lastFlushed = time.Now()
}
func (b *Buffer) Close() {
// flush existing buffer
b.logger.Info(`k-stream.changelog.buffer`, `flushing buffer...`)
if err := b.flushAll(); err != nil {
b.logger.ErrorContext(context.Background(), `k-stream.changelog.buffer`, err)
}
}
<|start_filename|>kstream/processors/join/repartition.go<|end_filename|>
package join
import (
"github.com/tryfix/errors"
"github.com/tryfix/kstream/kstream/encoding"
)
type Side int
const (
LeftSide Side = iota + 1
RightSide
)
type RepartitionTopic struct {
Name string
Suffix string
ReplicationFactor int
NumOfPartitions int
MinInSycReplicas int
}
type Repartition struct {
Enable bool
StreamSide Side
KeyEncoder encoding.Builder
ValueEncoder encoding.Builder
Topic RepartitionTopic
}
type RepartitionOptions struct {
LeftTopic func(string) string
RightTopic func(string) string
LeftRepartition Repartition
RightRepartition Repartition
}
type RepartitionOption func(sink *RepartitionOptions)
func RepartitionLeftStream(keyEncodingBuilder, valueEncodingBuilder encoding.Builder) RepartitionOption {
return func(opts *RepartitionOptions) {
opts.LeftRepartition = Repartition{
Enable: true,
StreamSide: LeftSide,
KeyEncoder: keyEncodingBuilder,
ValueEncoder: valueEncodingBuilder,
}
}
}
func RepartitionRightStream(keyEncodingBuilder, valueEncodingBuilder encoding.Builder) RepartitionOption {
return func(opts *RepartitionOptions) {
opts.RightRepartition = Repartition{
Enable: true,
StreamSide: RightSide,
KeyEncoder: keyEncodingBuilder,
ValueEncoder: valueEncodingBuilder,
}
}
}
func (iOpts *RepartitionOptions) Apply(options ...RepartitionOption) {
for _, o := range options {
o(iOpts)
}
}
func (r Repartition) Validate(s Side) error {
if r.StreamSide != s {
return errors.New(`stream side is not compatible`)
}
if r.KeyEncoder == nil {
return errors.New(`repartition key encoder cannot be nil`)
}
if r.ValueEncoder == nil {
return errors.New(`repartition value encoder cannot be nil`)
}
//if r.Topic.Name == `` {
// return errors.New( `repartition topic can not be empty`)
//}
return nil
}
<|start_filename|>kstream/store/store_bench_test.go<|end_filename|>
package store
import (
"context"
"math/rand"
"testing"
)
func BenchmarkDefaultStore_Set(b *testing.B) {
store := makeTestStore(0)
ctx := context.Background()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := store.Set(ctx, rand.Intn(10000000), `100`, 0); err != nil {
b.Error(err)
}
}
})
}
func BenchmarkDefaultStore_Get(b *testing.B) {
store := makeTestStore(0)
ctx := context.Background()
for i := 1; i < 999999; i++ {
if err := store.Set(ctx, rand.Intn(i), `100`, 0); err != nil {
b.Error(err)
}
}
b.ResetTimer()
b.StartTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if _, err := store.Get(ctx, rand.Intn(999998)+1); err != nil {
b.Error(err)
}
}
})
}
func BenchmarkDefaultStore_Delete(b *testing.B) {
store := makeTestStore(0)
ctx := context.Background()
for i := 1; i <= 999999; i++ {
if err := store.Set(ctx, rand.Intn(i), `100`, 0); err != nil {
b.Error(err)
}
}
b.ResetTimer()
b.StartTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
if err := store.Delete(ctx, rand.Intn(999998)+1); err != nil {
b.Error(err)
}
}
})
}
| gmbyapa/kstream |
<|start_filename|>Makefile<|end_filename|>
VERSION = $(shell grep 'Version:' _oasis | sed 's/Version: *//')
VFILE = lib/version.ml
SETUP = ocaml setup.ml
build: setup.data $(VFILE)
$(SETUP) -build $(BUILDFLAGS)
doc: setup.data build
$(SETUP) -doc $(DOCFLAGS)
test: setup.data build
$(SETUP) -test $(TESTFLAGS)
all: $(VFILE)
$(SETUP) -all $(ALLFLAGS)
install: setup.data
$(SETUP) -install $(INSTALLFLAGS)
uninstall: setup.data
$(SETUP) -uninstall $(UNINSTALLFLAGS)
reinstall: setup.data
$(SETUP) -reinstall $(REINSTALLFLAGS)
clean:
$(SETUP) -clean $(CLEANFLAGS)
rm -f $(VFILE)
distclean:
$(SETUP) -distclean $(DISTCLEANFLAGS)
setup.data:
$(SETUP) -configure $(CONFIGUREFLAGS)
configure:
$(SETUP) -configure $(CONFIGUREFLAGS)
.PHONY: build doc test all install uninstall reinstall clean distclean configure
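# lib/version.ml is generated from the Version field in _oasis and ends up
# containing a single binding, e.g.: let current = "0.1.0"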
$(VFILE): _oasis
echo "(** CISO Version. *)" > $@
echo "" >> $@
echo "let current = \"$(VERSION)\"" >> $@
echo "(** The current version of CISO. *)" >> $@
init-doc:
mkdir -p gh-pages
cd gh-pages && ( \
git init && \
git remote add origin git@github.com:samoht/ciso.git && \
git fetch && \
git checkout -b gh-pages && \
(git pull origin gh-pages || exit 0))
update-doc: doc
rm -f gh-pages/*.html
cd gh-pages && cp ../ciso.docdir/*.html .
cd gh-pages && git add * && (git commit -a -m "Update docs" || exit 0)
cd gh-pages && git push origin gh-pages
| samoht/ciso |
<|start_filename|>src/index.css<|end_filename|>
body {
margin: 0;
padding: 0;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
min-height: 100%;
font-family: Gotham Rounded A, Gotham Rounded B, -apple-system, BlinkMacSystemFont, 'Segoe UI',
'Roboto', 'Oxygen', 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
sans-serif;
font-size: 16px;
line-height: 1.5;
color: #4d5659;
text-rendering: optimizeSpeed;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', monospace;
}
[hidden] {
display: none;
}
a {
background-color: transparent;
text-decoration: none;
}
a:active,
a:hover {
outline: 0;
}
img {
border: 0;
}
svg:not(:root) {
overflow: hidden;
}
button,
input {
color: inherit;
font: inherit;
margin: 0;
}
button {
overflow: visible;
text-transform: none;
}
button,
html input[type='button'],
input[type='reset'],
input[type='submit'] {
-webkit-appearance: button;
cursor: pointer;
}
button[disabled],
html input[disabled] {
cursor: default;
}
button::-moz-focus-inner,
input::-moz-focus-inner {
border: 0;
padding: 0;
}
input {
line-height: normal;
}
*,
*:after,
*:before {
box-sizing: border-box;
}
p {
margin-top: 0;
margin-bottom: 30px;
}
img,
video {
max-width: 100%;
}
html {
box-sizing: border-box;
}
*,
*:before,
*:after {
box-sizing: inherit;
}
* {
margin: 0;
padding: 0;
}
<|start_filename|>generate-emoji-shortcodes.js<|end_filename|>
const fetch = require('node-fetch');
const fs = require('fs');
const prettier = require('prettier');
function writeFile(path, contents) {
return new Promise((resolve, reject) => {
fs.writeFile(path, contents, err => {
if (err) reject(err);
resolve();
});
});
}
async function writeMapping(data) {
const contents = `const emojiShortcodes = ${JSON.stringify(data, null, 2)};
export type Shortcode = keyof typeof emojiShortcodes;
export default emojiShortcodes;`;
const configFile = await prettier.resolveConfigFile(
'src/utils/emojiShortcodes.ts'
);
const config = await prettier.resolveConfig(configFile);
const formattedContents = prettier.format(contents, {
...config,
parser: 'typescript'
});
await writeFile('src/utils/emojiShortcodes.ts', formattedContents);
}
function fullyQualified(line) {
return line.includes('; fully-qualified');
}
function slugify(text) {
return text
.toString()
.toLowerCase()
.replace(/\s+/g, '-') // Replace spaces with -
.replace(/[^\w\-]+/g, '') // Remove all non-word chars
.replace(/\-\-+/g, '-') // Replace multiple - with single -
.replace(/^-+/, '') // Trim - from start of text
.replace(/-+$/, ''); // Trim - from end of text
}
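// Parses one fully-qualified line of emoji-test.txt, e.g.
//   "1F600 ; fully-qualified # 😀 grinning face"
// into { shortcode: ':grinning-face:', emoji: '😀' }.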
function parseEmoji(l) {
const components = l
.split(';')[0]
.trim()
.split(' ');
const emoji = components
.map(s => String.fromCodePoint(parseInt(s, 16)))
.join('');
const description = l.split('#')[1];
const shortcode = `:${slugify(description)}:`;
return { shortcode, emoji };
}
(async () => {
const res = await fetch(
'https://unicode.org/Public/emoji/12.0/emoji-test.txt'
);
const rawData = await res.text();
const lines = rawData.split('\n');
const parsedEmoji = lines.filter(fullyQualified).map(parseEmoji);
const mapping = parsedEmoji.reduce((a, b) => {
a[b.shortcode] = b.emoji;
return a;
}, {});
await writeMapping(mapping);
})();
| danielkim107/simplewebrtc-talky-sample-app |
<|start_filename|>index.js<|end_filename|>
module.exports = require('./lib/PDFMerger');
| flowaccount/easy-pdf-merge |
<|start_filename|>mix.exs<|end_filename|>
defmodule PlugProxy.Mixfile do
use Mix.Project
@version "0.4.0"
@github_link "https://github.com/tommy351/plug-proxy"
def project do
[
app: :plug_proxy,
version: @version,
elixir: "~> 1.6",
description: "A plug for reverse proxy server",
elixirc_paths: elixirc_paths(Mix.env()),
deps: deps(),
package: package(),
lockfile: lockfile(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: preferred_cli_env(),
source_url: @github_link,
homepage_url: @github_link,
docs: [main: "PlugProxy"]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp lockfile do
case System.get_env("COWBOY_VERSION") do
"1" <> _ -> "mix-cowboy1.lock"
_ -> "mix.lock"
end
end
defp deps do
[
{:cowboy, "~> 1.0 or ~> 2.4"},
{:plug, "~> 1.5"},
{:hackney, "~> 1.10"},
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:excoveralls, "~> 0.7", only: :test, runtime: false},
{:inch_ex, "~> 0.5", only: [:dev, :test], runtime: false}
]
end
defp package do
[
maintainers: ["<NAME>"],
licenses: ["MIT License"],
links: %{"GitHub" => @github_link}
]
end
defp preferred_cli_env do
[
coveralls: :test,
"coveralls.travis": :test
]
end
end
<|start_filename|>lib/plug_proxy/response.ex<|end_filename|>
defmodule PlugProxy.Response do
import Plug.Conn
alias PlugProxy.{BadGatewayError, GatewayTimeoutError}
@type headers :: [{String.t(), String.t()}]
@doc """
Extract information from response headers.
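
  For example:

      iex> PlugProxy.Response.process_headers([{"Content-Length", "42"}])
      {[], 42}

      iex> PlugProxy.Response.process_headers([{"Transfer-Encoding", "chunked"}])
      {[], :chunked}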
"""
@spec process_headers(headers) :: {headers, integer | :chunked}
def process_headers(headers) do
process_headers(headers, [], 0)
end
defp process_headers([], acc, length) do
{Enum.reverse(acc), length}
end
defp process_headers([{key, value} | tail], acc, length) do
process_headers(String.downcase(key), value, tail, acc, length)
end
defp process_headers("content-length", value, headers, acc, length) do
length =
case Integer.parse(value) do
{int, ""} -> int
_ -> length
end
process_headers(headers, acc, length)
end
defp process_headers("transfer-encoding", "chunked", headers, acc, _) do
process_headers(headers, acc, :chunked)
end
defp process_headers(key, value, headers, acc, length) do
process_headers(headers, [{key, value} | acc], length)
end
@doc """
Run all before_send callbacks and set the connection state.
"""
@spec before_send(Plug.Conn.t(), term) :: Plug.Conn.t()
def before_send(%Plug.Conn{before_send: before_send} = conn, state) do
conn = Enum.reduce(before_send, conn, & &1.(&2))
%{conn | state: state}
end
@doc """
Reads data from the client and sends the chunked response.
"""
@spec chunked_reply(Plug.Conn.t(), :hackney.client_ref()) :: Plug.Conn.t()
def chunked_reply(conn, client) do
conn
|> send_chunked(conn.status)
|> do_chunked_reply(client)
end
defp do_chunked_reply(conn, client) do
case :hackney.stream_body(client) do
{:ok, data} ->
{:ok, conn} = chunk(conn, data)
do_chunked_reply(conn, client)
:done ->
conn
{:error, err} ->
raise BadGatewayError, reason: err
end
end
@doc """
Reads data from the client and sends the response.
"""
@spec reply(Plug.Conn.t(), :hackney.client_ref()) :: Plug.Conn.t()
def reply(conn, client) do
case :hackney.body(client) do
{:ok, body} ->
send_resp(conn, conn.status, body)
{:error, :timeout} ->
raise GatewayTimeoutError, reason: :read
{:error, err} ->
raise BadGatewayError, reason: err
end
end
end
| Legrandk/plug-proxy |
<|start_filename|>scripts/gen-release-notes.js<|end_filename|>
const execa = require("execa");
const moment = require("moment");
const _ = require("lodash");
const YAML = require("yamljs");
const fs = require("fs-extra");
const { parseCommit } = require("parse-commit-message");
async function gitLogs(repoPath) {
const gitTagCmd = `git describe --abbrev=0 --tags`;
const { stdout: gitTag } = await execa.command(gitTagCmd, {
cwd: repoPath,
shell: true,
});
console.log(gitTag);
// v0.5.13
const [major, minor, patch] = gitTag.split(".");
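// anchor the log range at the first release of the current minor
// (e.g. v0.5.13 -> v0.5.0) so the notes cover the whole minor series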
const gitTagAnchor = `${major}.${minor}.0`;
console.log({ gitTagAnchor });
const command = `git log ${gitTagAnchor}..HEAD --format=\'%C(auto) %h %s\'`;
const out = await execa.command(command, { cwd: repoPath, shell: true });
// const re = new RegExp(/(?<type>(^[:\(])+)?)(\((?<topic>.+)\))?:?/);
// const parser = conventionalCommitsParser();
// sync()
console.log("parsing commits...");
const commits = out.stdout
.split("\n")
.map((line) => {
// const out = sync(_.trim(line));
console.log(line);
const [skip, chash, ...ccommit] = line.split(" ");
console.log(ccommit);
const tmp = ccommit.join(" ");
console.log(tmp);
try {
const out = parseCommit(tmp);
const { type: ctype, scope: topic, subject: cmsg } = out.header;
// const match = re.exec(ctype);
// const {type, topic} = match.groups
// return { chash, ctype: type, topic, cmsg: cmsg.join(" ") }
return { chash, ctype, topic, cmsg };
} catch (err) {
console.log({ err });
return {};
}
})
.filter((ent) => !_.isEmpty(ent));
const tag = { major, minor, patch };
return { commits, tag };
}
async function cleanCommits(commits) {
return _.reject(commits, (ent) => {
return _.some(
["chore", "docs", "spike", "tests", "refactor", "invest", "qa"],
(bad) => ent.ctype.startsWith(bad)
);
});
}
function addExtraFields(commits) {
return commits.map((c) => {
let out = _.defaults(c, { topic: "", desc: "", tags: [], docs: "" });
if (_.isNull(out.topic)) {
out.topic = "misc"
}
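    // prefix the type with a sort key so lexical sorting yields feat < enhance < fix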
out.ctype = {"feat": 0, "enhance":1, "fix":2}[out.ctype] + "-" + out.ctype;
return out;
});
}
async function main() {
const stats = {
numCommits: 0,
};
const { commits, tag } = await gitLogs(
"/Users/kevinlin/projects/dendronv2/dendron"
);
stats.numCommits = commits.length;
let commitsClean = await cleanCommits(commits);
commitsClean = _.sortBy(commitsClean, (ent) => {
  return ent.topic + ["feat", "enhance", "fix"].indexOf(ent.ctype);
});
const yamlString = YAML.stringify(addExtraFields(commitsClean), 4);
console.log(commitsClean);
const tagName = [tag.major, tag.minor, tag.patch].join(".");
fs.writeFileSync(`data/${tagName}.yml`, yamlString);
}
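// Scaffold for grouping release notes by area; not referenced yet.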
const sections = {
commands: {},
notes: {},
other: {},
pods: {},
schemas: {},
workspace: {},
};
main();
| oolonek/dendron-site |
<|start_filename|>IfcSql/ifcSQL_for_ifcSQL_instance.cs<|end_filename|>
// ifcSQL_for_ifcSQL_Instance_db_generated.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using db;
namespace ifcSQL{//########################################################################
// namespace overview and template for filenames:
namespace ifcInstance{}
namespace ifcProject{}
// Overview and Template for class-extending:
namespace ifcInstance{//=====================================================================
public partial class Entity_Row : RowBase{}
public partial class EntityAttributeListElementOfBinary_Row : RowBase{}
public partial class EntityAttributeListElementOfEntityRef_Row : RowBase{}
public partial class EntityAttributeListElementOfFloat_Row : RowBase{}
public partial class EntityAttributeListElementOfInteger_Row : RowBase{}
public partial class EntityAttributeListElementOfList_Row : RowBase{}
public partial class EntityAttributeListElementOfListElementOfEntityRef_Row : RowBase{}
public partial class EntityAttributeListElementOfListElementOfFloat_Row : RowBase{}
public partial class EntityAttributeListElementOfListElementOfInteger_Row : RowBase{}
public partial class EntityAttributeListElementOfString_Row : RowBase{}
public partial class EntityAttributeOfBinary_Row : RowBase{}
public partial class EntityAttributeOfBoolean_Row : RowBase{}
public partial class EntityAttributeOfEntityRef_Row : RowBase{}
public partial class EntityAttributeOfEnum_Row : RowBase{}
public partial class EntityAttributeOfFloat_Row : RowBase{}
public partial class EntityAttributeOfInteger_Row : RowBase{}
public partial class EntityAttributeOfList_Row : RowBase{}
public partial class EntityAttributeOfString_Row : RowBase{}
public partial class EntityAttributeOfVector_Row : RowBase{}
public partial class EntityVariableName_Row : RowBase{}
}// namespace ifcInstance -------------------------------------------------------------------
namespace ifcProject{//=====================================================================
public partial class EntityInstanceIdAssignment_Row : RowBase{}
public partial class LastGlobalEntityInstanceId_Row : RowBase{}
public partial class Project_Row : RowBase{}
public partial class ProjectGroup_Row : RowBase{}
public partial class ProjectGroupType_Row : RowBase{}
}// namespace ifcProject -------------------------------------------------------------------
//#############################################################################################
//#############################################################################################
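// The tables below form an entity-attribute-value store: each instance gets a
// row in Entity, and every attribute value lands in the EntityAttributeOf*
// table matching its base type, keyed by (GlobalEntityInstanceId,
// OrdinalPosition); list elements additionally carry ListDim1Position (and
// ListDim2Position for nested lists).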
namespace ifcInstance{//=====================================================================
public partial class Entity_Row : RowBase{
public Entity_Row(long GlobalEntityInstanceId, int EntityTypeId){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.EntityTypeId=EntityTypeId;}
public Entity_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] public long GlobalEntityInstanceId=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] [References(RefTableSchema="ifcSchema",RefTableName="Type",RefTableColName="TypeId")] public int EntityTypeId=0;
}
public partial class EntityAttributeListElementOfBinary_Row : RowBase{
public EntityAttributeListElementOfBinary_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position, string Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;this.Value=Value;}
public EntityAttributeListElementOfBinary_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
[DbField] [UserType(schema="ifcType",name="ifcBINARY")] public string Value="";
}
public partial class EntityAttributeListElementOfEntityRef_Row : RowBase{
public EntityAttributeListElementOfEntityRef_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position, long Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;this.Value=Value;}
public EntityAttributeListElementOfEntityRef_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
[DbField] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long Value=0;
}
public partial class EntityAttributeListElementOfFloat_Row : RowBase{
public EntityAttributeListElementOfFloat_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position, double Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;this.Value=Value;}
public EntityAttributeListElementOfFloat_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
[DbField] [UserType(schema="ifcType",name="ifcREAL")] public double Value=0;
}
public partial class EntityAttributeListElementOfInteger_Row : RowBase{
public EntityAttributeListElementOfInteger_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position, int Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;this.Value=Value;}
public EntityAttributeListElementOfInteger_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
[DbField] [UserType(schema="ifcType",name="ifcINTEGER")] public int Value=0;
}
public partial class EntityAttributeListElementOfList_Row : RowBase{
public EntityAttributeListElementOfList_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;}
public EntityAttributeListElementOfList_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
}
public partial class EntityAttributeListElementOfListElementOfEntityRef_Row : RowBase{
public EntityAttributeListElementOfListElementOfEntityRef_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position, int ListDim2Position, long Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;this.ListDim2Position=ListDim2Position;this.Value=Value;}
public EntityAttributeListElementOfListElementOfEntityRef_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim2Position=0;
[DbField] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long Value=0;
}
public partial class EntityAttributeListElementOfListElementOfFloat_Row : RowBase{
public EntityAttributeListElementOfListElementOfFloat_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position, int ListDim2Position, double Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;this.ListDim2Position=ListDim2Position;this.Value=Value;}
public EntityAttributeListElementOfListElementOfFloat_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim2Position=0;
[DbField] [UserType(schema="ifcType",name="ifcREAL")] public double Value=0;
}
public partial class EntityAttributeListElementOfListElementOfInteger_Row : RowBase{
public EntityAttributeListElementOfListElementOfInteger_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position, int ListDim2Position, int Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;this.ListDim2Position=ListDim2Position;this.Value=Value;}
public EntityAttributeListElementOfListElementOfInteger_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim2Position=0;
[DbField] [UserType(schema="ifcType",name="ifcINTEGER")] public int Value=0;
}
public partial class EntityAttributeListElementOfString_Row : RowBase{
public EntityAttributeListElementOfString_Row(long GlobalEntityInstanceId, int OrdinalPosition, int ListDim1Position, string Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.ListDim1Position=ListDim1Position;this.Value=Value;}
public EntityAttributeListElementOfString_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int ListDim1Position=0;
[DbField] [UserType(schema="ifcType",name="ifcSTRING")] public string Value="";
}
public partial class EntityAttributeOfBinary_Row : RowBase{
public EntityAttributeOfBinary_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId, string Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;this.Value=Value;}
public EntityAttributeOfBinary_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
[DbField] [UserType(schema="ifcType",name="ifcBINARY")] public string Value="";
}
public partial class EntityAttributeOfBoolean_Row : RowBase{
public EntityAttributeOfBoolean_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId, bool Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;this.Value=Value;}
public EntityAttributeOfBoolean_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
[DbField] [UserType(schema="ifcType",name="ifcBOOLEAN")] public bool Value=false;
}
public partial class EntityAttributeOfEntityRef_Row : RowBase{
public EntityAttributeOfEntityRef_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId, long Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;this.Value=Value;}
public EntityAttributeOfEntityRef_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
[DbField] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long Value=0;
}
public partial class EntityAttributeOfEnum_Row : RowBase{
public EntityAttributeOfEnum_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId, int Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;this.Value=Value;}
public EntityAttributeOfEnum_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
[DbField] [UserType(schema="ifcEnum",name="Id")] public int Value=0;
}
public partial class EntityAttributeOfFloat_Row : RowBase{
public EntityAttributeOfFloat_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId, double Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;this.Value=Value;}
public EntityAttributeOfFloat_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
[DbField] [UserType(schema="ifcType",name="ifcREAL")] public double Value=0;
}
public partial class EntityAttributeOfInteger_Row : RowBase{
public EntityAttributeOfInteger_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId, int Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;this.Value=Value;}
public EntityAttributeOfInteger_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
[DbField] [UserType(schema="ifcType",name="ifcINTEGER")] public int Value=0;
}
public partial class EntityAttributeOfList_Row : RowBase{
public EntityAttributeOfList_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;}
public EntityAttributeOfList_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
}
public partial class EntityAttributeOfString_Row : RowBase{
public EntityAttributeOfString_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId, string Value){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;this.Value=Value;}
public EntityAttributeOfString_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
[DbField] [UserType(schema="ifcType",name="ifcSTRING")] public string Value="";
}
public partial class EntityAttributeOfVector_Row : RowBase{
public EntityAttributeOfVector_Row(long GlobalEntityInstanceId, int OrdinalPosition, int TypeId, double X, double Y, double? Z){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.OrdinalPosition=OrdinalPosition;this.TypeId=TypeId;this.X=X;this.Y=Y;this.Z=Z;}
public EntityAttributeOfVector_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcOrder",name="Position")] public int OrdinalPosition=0;
[DbField] [UserType(schema="ifcSchema",name="Id")] public int TypeId=0;
[DbField] [UserType(schema="ifcType",name="ifcREAL")] public double X=0;
[DbField] [UserType(schema="ifcType",name="ifcREAL")] public double Y=0;
[DbField] [UserType(schema="ifcType",name="ifcREAL")] public double? Z=null;
}
public partial class EntityVariableName_Row : RowBase{
public EntityVariableName_Row(long GlobalEntityInstanceId, string VarableName){this.GlobalEntityInstanceId=GlobalEntityInstanceId;this.VarableName=VarableName;}
public EntityVariableName_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
[DbField] [UserType(schema="ifcType",name="ifcSTRING")] public string VarableName="";
}
}// namespace ifcInstance -------------------------------------------------------------------
namespace ifcProject{//=====================================================================
public partial class EntityInstanceIdAssignment_Row : RowBase{
public EntityInstanceIdAssignment_Row(int ProjectId, long ProjectEntityInstanceId, long GlobalEntityInstanceId){this.ProjectId=ProjectId;this.ProjectEntityInstanceId=ProjectEntityInstanceId;this.GlobalEntityInstanceId=GlobalEntityInstanceId;}
public EntityInstanceIdAssignment_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcProject",name="Id")] [References(RefTableSchema="ifcProject",RefTableName="Project",RefTableColName="ProjectId")] public int ProjectId=0;
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcInstance",name="Id")] public long ProjectEntityInstanceId=0;
[DbField] [UserType(schema="ifcInstance",name="Id")] [References(RefTableSchema="ifcInstance",RefTableName="Entity",RefTableColName="GlobalEntityInstanceId")] public long GlobalEntityInstanceId=0;
}
public partial class LastGlobalEntityInstanceId_Row : RowBase{
public LastGlobalEntityInstanceId_Row(int ProjectId, long GlobalEntityInstanceId){this.ProjectId=ProjectId;this.GlobalEntityInstanceId=GlobalEntityInstanceId;}
public LastGlobalEntityInstanceId_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcProject",name="Id")] public int ProjectId=0;
[DbField] [UserType(schema="ifcInstance",name="Id")] public long GlobalEntityInstanceId=0;
}
public partial class Project_Row : RowBase{
public Project_Row(int ProjectId, string ProjectName, string ProjectDescription, int ProjectGroupId, int SpecificationId){this.ProjectId=ProjectId;this.ProjectName=ProjectName;this.ProjectDescription=ProjectDescription;this.ProjectGroupId=ProjectGroupId;this.SpecificationId=SpecificationId;}
public Project_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcProject",name="Id")] public int ProjectId=0;
[DbField] [UserType(schema="Text",name="ToString")] public string ProjectName="";
[DbField] [UserType(schema="Text",name="Description")] public string ProjectDescription="";
[DbField] [UserType(schema="ifcProject",name="Id")] [References(RefTableSchema="ifcProject",RefTableName="ProjectGroup",RefTableColName="ProjectGroupId")] public int ProjectGroupId=0;
[DbField] [UserType(schema="ifcSchema",name="GroupId")] [References(RefTableSchema="ifcSpecification",RefTableName="Specification",RefTableColName="SpecificationId")] public int SpecificationId=0;
}
public partial class ProjectGroup_Row : RowBase{
public ProjectGroup_Row(int ProjectGroupId, string ProjectGroupName, string ProjectGroupDescription, int? ParentProjectGroupId, int ProjectGroupTypeId){this.ProjectGroupId=ProjectGroupId;this.ProjectGroupName=ProjectGroupName;this.ProjectGroupDescription=ProjectGroupDescription;this.ParentProjectGroupId=ParentProjectGroupId;this.ProjectGroupTypeId=ProjectGroupTypeId;}
public ProjectGroup_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcProject",name="Id")] public int ProjectGroupId=0;
[DbField] [UserType(schema="Text",name="ToString")] public string ProjectGroupName="";
[DbField] [UserType(schema="Text",name="Description")] public string ProjectGroupDescription="";
[DbField] [UserType(schema="ifcProject",name="Id")] [References(RefTableSchema="ifcProject",RefTableName="ProjectGroup",RefTableColName="ProjectGroupId")] public int? ParentProjectGroupId=null;
[DbField] [UserType(schema="ifcProject",name="Id")] [References(RefTableSchema="ifcProject",RefTableName="ProjectGroupType",RefTableColName="ProjectGroupTypeId")] public int ProjectGroupTypeId=0;
}
public partial class ProjectGroupType_Row : RowBase{
public ProjectGroupType_Row(int ProjectGroupTypeId, string ProjectGroupTypeName, string ProjectGroupTypeDescription){this.ProjectGroupTypeId=ProjectGroupTypeId;this.ProjectGroupTypeName=ProjectGroupTypeName;this.ProjectGroupTypeDescription=ProjectGroupTypeDescription;}
public ProjectGroupType_Row(){}
[DbField(PrimaryKey=true, SortAscending=true)] [UserType(schema="ifcProject",name="Id")] public int ProjectGroupTypeId=0;
[DbField] [UserType(schema="Text",name="ToString")] public string ProjectGroupTypeName="";
[DbField] [UserType(schema="Text",name="Description")] public string ProjectGroupTypeDescription="";
}
}// namespace ifcProject -------------------------------------------------------------------
public partial class cp_Schema:SchemaBase{// -------------------------------------------------------------------
public TableBase Entity=new RowList<ifcInstance.Entity_Row>();
public TableBase EntityAttributeListElementOfBinary=new RowList<ifcInstance.EntityAttributeListElementOfBinary_Row>();
public TableBase EntityAttributeListElementOfEntityRef=new RowList<ifcInstance.EntityAttributeListElementOfEntityRef_Row>();
public TableBase EntityAttributeListElementOfFloat=new RowList<ifcInstance.EntityAttributeListElementOfFloat_Row>();
public TableBase EntityAttributeListElementOfInteger=new RowList<ifcInstance.EntityAttributeListElementOfInteger_Row>();
public TableBase EntityAttributeListElementOfList=new RowList<ifcInstance.EntityAttributeListElementOfList_Row>();
public TableBase EntityAttributeListElementOfListElementOfEntityRef=new RowList<ifcInstance.EntityAttributeListElementOfListElementOfEntityRef_Row>();
public TableBase EntityAttributeListElementOfListElementOfFloat=new RowList<ifcInstance.EntityAttributeListElementOfListElementOfFloat_Row>();
public TableBase EntityAttributeListElementOfListElementOfInteger=new RowList<ifcInstance.EntityAttributeListElementOfListElementOfInteger_Row>();
public TableBase EntityAttributeListElementOfString=new RowList<ifcInstance.EntityAttributeListElementOfString_Row>();
public TableBase EntityAttributeOfBinary=new RowList<ifcInstance.EntityAttributeOfBinary_Row>();
public TableBase EntityAttributeOfBoolean=new RowList<ifcInstance.EntityAttributeOfBoolean_Row>();
public TableBase EntityAttributeOfEntityRef=new RowList<ifcInstance.EntityAttributeOfEntityRef_Row>();
public TableBase EntityAttributeOfEnum=new RowList<ifcInstance.EntityAttributeOfEnum_Row>();
public TableBase EntityAttributeOfFloat=new RowList<ifcInstance.EntityAttributeOfFloat_Row>();
public TableBase EntityAttributeOfInteger=new RowList<ifcInstance.EntityAttributeOfInteger_Row>();
public TableBase EntityAttributeOfList=new RowList<ifcInstance.EntityAttributeOfList_Row>();
public TableBase EntityAttributeOfString=new RowList<ifcInstance.EntityAttributeOfString_Row>();
public TableBase EntityAttributeOfVector=new RowList<ifcInstance.EntityAttributeOfVector_Row>();
public TableBase EntityInstanceIdAssignment=new RowList<ifcProject.EntityInstanceIdAssignment_Row>();
public TableBase EntityVariableName=new RowList<ifcInstance.EntityVariableName_Row>();
public TableBase Project=new RowList<ifcProject.Project_Row>();
}// of cp_Schema // -------------------------------------------------------------------
/// <summary>DataSource with the name "ifcSQL" for Software "ifc_in_out_sql"</summary>
public partial class _ifcSQL_for_ifcSQL_instance:TableSet{ //assign Tables to the TableSet
public _ifcSQL_for_ifcSQL_instance(string ServerName, string DatabaseName="ifcSQL_Instance"):base(ServerName,DatabaseName){}
public _ifcSQL_for_ifcSQL_instance():base(){}
public cp_Schema cp =new cp_Schema();
}
}// namespace ifc_in_out_sql ########################################################################
<|start_filename|>IfcSqlite/ifc_sqlite_data.cs<|end_filename|>
// ifc_sqlite_data.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System.Collections.Generic;
using System.Linq;
using System.Data.SQLite;
using NetSystem = System;
namespace ifc
{
public class SQLiteDataSet
{
public IList<SQLiteDataTable> Tables { get; internal set; }
public SQLiteDataSet()
{
this.Tables = new List<SQLiteDataTable>();
}
public void Clear()
{
Tables.Clear();
}
}
public class SQLiteDataTable
{
public string Name { get; private set; }
public IList<SQLiteDataRow> Rows { get; private set; }
public SQLiteDataTable(string name)
{
this.Name = name;
this.Rows = new List<SQLiteDataRow>();
}
}
public class SQLiteDataRow
{
public IList<SQLiteDataField> Fields { get; private set; }
public SQLiteDataRow()
{
this.Fields = new List<SQLiteDataField>();
}
public int Id
{
get
{
int id = -1;
SQLiteDataField field = Fields.FirstOrDefault(f => f.Parameter.ParameterName == "Id");
if(field != null && field.Parameter.Value != null)
id = (int)field.Parameter.Value;
return id;
}
}
public bool IsEmpty
{
get
{
return !Fields.Any(f => f.Parameter.Value != null);
}
}
public void OrderValuesByOrdinalPosition()
{
Fields = Fields.OrderBy(value => value.OrdinalPosition).ToList();
}
}
public class SQLiteDataField
{
public int OrdinalPosition { get; private set; }
public SQLiteParameter Parameter { get; private set; }
public SQLiteDataField(int ordinalPos, string name, NetSystem.Data.DbType dbType)
{
Parameter = new SQLiteParameter(name, dbType);
OrdinalPosition = ordinalPos;
}
public SQLiteDataField(int ordinalPos, string name, NetSystem.Data.DbType dbType, bool optional, object value)
{
Parameter = new SQLiteParameter(name, dbType);
Parameter.Value = value;
Parameter.IsNullable = optional;
OrdinalPosition = ordinalPos;
}
public SQLiteDataField(int ordinalPos, string name, bool optional, object value)
{
Parameter = new SQLiteParameter(name);
Parameter.Value = value;
Parameter.IsNullable = optional;
OrdinalPosition = ordinalPos;
}
public SQLiteDataField(int ordinalPos, string name, object value)
{
Parameter = new SQLiteParameter(name);
Parameter.Value = value;
OrdinalPosition = ordinalPos;
}
}
}
<|start_filename|>IfcSharpCore/ifc_schema.cs<|end_filename|>
// ifc_schema.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using NetSystem=System;
using System.Collections.Generic;
using System.Reflection;
namespace ifc{//===================================================================================
public partial class ENTITY{//=====================================================================
public class AttribInfo{
public AttribInfo(FieldInfo field,int OrdinalPosition, bool IsDerived,bool optional){this.field=field;this.OrdinalPosition=OrdinalPosition;this.IsDerived=IsDerived;this.optional=optional;}
public FieldInfo field=null;
public int OrdinalPosition=0;
public bool IsDerived=false;
public bool optional=false;
}
public class AttribListType:List<AttribInfo>{//-----------------------------------------------------
public AttribListType(){}
public AttribListType(NetSystem.Type EntityType){
TemporaryAttribDict.Clear();int VarCount=0;
foreach (FieldInfo field in EntityType.GetFields(BindingFlags.Public|BindingFlags.Instance|BindingFlags.FlattenHierarchy))
foreach (NetSystem.Attribute attr in field.GetCustomAttributes(true)) if (attr is ifcAttribute) {TemporaryAttribDict.Add(((ifcAttribute)attr).OrdinalPosition,new AttribInfo(field,((ifcAttribute)attr).OrdinalPosition,((ifcAttribute)attr).derived,((ifcAttribute)field.GetCustomAttributes(inherit:(true))[0]).optional));VarCount++;}
for (int i=1;i<=VarCount;i++) this.Add(TemporaryAttribDict[i]);
}
public static Dictionary<int,AttribInfo> TemporaryAttribDict=new Dictionary<int,AttribInfo>();
};//------------------------------------------------------------------------------------------------
public class InversListType:List<FieldInfo>{//-----------------------------------------------------
public InversListType(){}
public InversListType(NetSystem.Type EntityType){foreach (FieldInfo field in EntityType.GetFields(BindingFlags.Public|BindingFlags.Instance|BindingFlags.FlattenHierarchy))
foreach (NetSystem.Attribute attr in field.GetCustomAttributes(true)) if (attr is ifcInverseAttribute) this.Add(field);
}
}//------------------------------------------------------------------------------------------------
public class ComponentsType{//---------------------------------------------------------------------
public ComponentsType(){}
public ComponentsType(NetSystem.Type EntityType){this.EntityType=EntityType;AttribList=new AttribListType(EntityType);InversList=new InversListType(EntityType);}
public NetSystem.Type EntityType=null;
public AttribListType AttribList=null;
public InversListType InversList=null;
}//------------------------------------------------------------------------------------------------
public static class TypeDictionary{//--------------------------------------------------------------
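// Caches reflection metadata (attribute and inverse lists) per entity type and
// maps ifcSql type ids to CLR types; GetComponents fills the cache lazily on
// first access.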
public static ComponentsType GetComponents(NetSystem.Type EntityType){//...........................
if (!EntityTypeComponentsDict.ContainsKey(EntityType)) EntityTypeComponentsDict.Add(EntityType,new ComponentsType(EntityType));
return EntityTypeComponentsDict[EntityType];
}//................................................................................................
public static Dictionary<NetSystem.Type,ComponentsType> EntityTypeComponentsDict=new Dictionary<NetSystem.Type,ComponentsType>();
public static List<ComponentsType> EntityTypeComponentsList=new List<ComponentsType>();
public static void FillEntityTypeComponentsDict(){//.............................................................
foreach (NetSystem.Type t in NetSystem.Reflection.Assembly.GetAssembly(typeof(ifc.ENTITY)).GetTypes())
if (t.IsClass) if (!t.IsAbstract) if (t.IsSubclassOf(typeof(ifc.ENTITY))) EntityTypeComponentsList.Add(new ComponentsType(t));
foreach (ComponentsType ct in EntityTypeComponentsList) EntityTypeComponentsDict.Add(ct.EntityType,ct);
foreach ( NetSystem.Reflection.Assembly a in NetSystem.AppDomain.CurrentDomain.GetAssemblies()) foreach (NetSystem.Type t in a.GetTypes())
{if ( (t.IsEnum)
|| (t.IsSubclassOf(typeof(ifc.ENTITY)))
|| (t.IsSubclassOf(typeof(ifc.SELECT)))
|| (t.IsSubclassOf(typeof(ifc.TypeBase)))
|| (typeof(ifcListInterface).IsAssignableFrom(t))
) foreach (NetSystem.Attribute attr in t.GetCustomAttributes(true)) if (attr is ifcSqlAttribute) if (((ifcSqlAttribute)attr).SqlTypeId!=0)
{if (TypeIdNameDict.ContainsKey( ((ifcSqlAttribute)attr).SqlTypeId ) ) throw new NetSystem.Exception("Error on FillEntityTypeComponentsDict: double (Sql)TypeId="+((ifcSqlAttribute)attr).SqlTypeId);
TypeIdNameDict.Add( ((ifcSqlAttribute)attr).SqlTypeId, t.Name);
TypeIdTypeDict.Add( ((ifcSqlAttribute)attr).SqlTypeId, t);
}
}
}//................................................................................................
public static Dictionary<int,string> TypeIdNameDict=new Dictionary<int, string>();
public static Dictionary<int,NetSystem.Type> TypeIdTypeDict=new Dictionary<int,NetSystem.Type >();
}//------------------------------------------------------------------------------------------------
}// of ENTITY =====================================================================================
}// ifc============================================================================================
<|start_filename|>IfcSharpCore/ifc_base_type.cs<|end_filename|>
// ifc_base_type.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
using System.Globalization;
namespace ifc{//==============================
public class TypeBase:ifcSqlType{
public virtual Type GetBaseType(){return null;}
public static char StringChar='\'';
public static bool HasStringChar=true;
public virtual object ToSqliteValue() { return DBNull.Value; }//EF20200131:Added custom converter method for all TypeBase objects
}
public partial class TYPE<T>:TypeBase{//,ifcParseInterface {//--------------------------------
public TYPE(){IsNull=true;}
public TYPE(T v){IsNull=false;TypeValue=v;}
public T TypeValue;
public override Type GetBaseType(){return typeof(T);}
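// STEP serialization rules: "$" for a null value, reals printed with trailing
// zeros trimmed, ".T."/".F." for booleans, and strings wrapped in StringChar
// when HasStringChar is set.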
public override string ToString(){if (IsNull) return "$";
else if ( typeof(T).Equals(typeof(double)) ) return ((double)(object)TypeValue).ToString("0.0000000000",CultureInfo.InvariantCulture).TrimEnd('0');
else if ( typeof(T).Equals(typeof(bool)) ) return ((bool)(object)TypeValue)?".T.":".F.";
else if ( typeof(T).Equals(typeof(string)) ) {if (HasStringChar) return StringChar+((string)(object)TypeValue).ToString()+StringChar; else return ((string)(object)TypeValue).ToString();}
else return TypeValue.ToString();}
}//-----------------------------------------------------
}// ifc=======================================
<|start_filename|>IfcSql/ifcSQL_db_interface.cs<|end_filename|>
// ifcSQL_db_interface.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
using System.ComponentModel;
using System.Data;
using System.Data.Common;
using System.Data.SqlClient;
using System.IO;
using System.Reflection;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Globalization;
using System.Text;
namespace db{//====================================================================================
public class DbField : System.Attribute { public bool PrimaryKey=false; public bool SortAscending=false; public bool SortDescending=false;}
[System.AttributeUsage(System.AttributeTargets.All,AllowMultiple = true)] public class References : System.Attribute {public string RefTableSchema=null;public string RefTableName=null;public string RefTableColName=null;}
public class UserType : System.Attribute {public string schema=null;public string name=null;}
public partial class RowBase{//--------------------------------------------------------------------
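// Maps rows to and from the database purely via reflection: every public field
// tagged with [DbField] is treated as a column of the same name.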
public RowBase(){}
public RowBase(SqlDataReader reader){FromReader(reader);}
public RowBase FromReader(SqlDataReader reader){foreach (FieldInfo field in this.GetType().GetFields()) foreach (Attribute attr in field.GetCustomAttributes(inherit:false)) if (attr is DbField) if (!reader.IsDBNull(reader.GetOrdinal(field.Name)) ) field.SetValue(this,reader[field.Name]);return this;}
public void AddDataTableColumns(DataTable table){foreach (FieldInfo field in this.GetType().GetFields()) foreach (Attribute attr in field.GetCustomAttributes(inherit:false)) if (attr is DbField) table.Columns.Add(new DataColumn(columnName:field.Name,dataType:Nullable.GetUnderlyingType(field.FieldType) ?? field.FieldType));}
public DataRow DataTableRow(DataTable table){DataRow row=table.NewRow();foreach (FieldInfo field in this.GetType().GetFields()) foreach (Attribute attr in field.GetCustomAttributes(inherit:false)) if (attr is DbField) row[field.Name]=(Nullable.GetUnderlyingType(field.FieldType)!= null)?(field.GetValue(this)??DBNull.Value):field.GetValue(this);return row;}
public string InsertStringOpen(string TableName){string s="INSERT INTO "+TableName+"(";int col=0;foreach (FieldInfo field in this.GetType().GetFields()) foreach (Attribute attr in field.GetCustomAttributes(inherit:false)) if (attr is DbField) s+=((++col>1)?",":"")+field.Name;s+=") VALUES\r\n";return s;}
public string InsertStringValuesRow(){string s="(";int col=0;foreach (FieldInfo field in this.GetType().GetFields()) foreach (Attribute attr in field.GetCustomAttributes(inherit:false)) if (attr is DbField) s+=((++col>1)?",":"")+DbFieldValueStr(field);s+=")\r\n";return s;}
public string InsertStringClose(){return "\r\n";}
public string DbFieldValueStr(FieldInfo field) {//.................................................
if (field.FieldType==typeof(System.String)) return "\'"+field.GetValue(this).ToString()+"\'";
else if (field.FieldType==typeof(System.DateTime)) return "\'"+((DateTime)field.GetValue(this)).ToString("yyyy-MM-ddTHH:mm:ss.fff")+"\'"; //SET startDate = CONVERT(datetime,'2015-03-11T23:59:59.000',126)
else if (field.FieldType==typeof(System.Double)) return String.Format(CultureInfo.InvariantCulture,"{0:F3}",(double)field.GetValue(this));
else if (field.FieldType==typeof(System.Decimal)) return String.Format(CultureInfo.InvariantCulture,"{0:F3}",(double)(decimal)field.GetValue(this));
else if (field.FieldType==typeof(System.Single)) return String.Format(CultureInfo.InvariantCulture,"{0:F3}",(Single)field.GetValue(this));
else if (field.FieldType==typeof(System.Int32)) return field.GetValue(this).ToString();
else if (field.FieldType==typeof(System.Int64)) return field.GetValue(this).ToString();
else if (field.FieldType==typeof(Nullable<double>)) { if (field.GetValue(this)==null) return "null"; else return field.GetValue(this).ToString();}
else if (field.FieldType==typeof(System.Boolean)) return (field.GetValue(this).ToString()=="True")?"-1":"0";
else if (field.FieldType==typeof(System.Byte)) return field.GetValue(this).ToString();
else return "unknown Type "+field.FieldType.ToString();
}//................................................................................................
}//------------------------------------------------------------------------------------------------
public class TableBase : List<Object>{//-----------------------------------------------------------
public string TableName="-";
public TableSet tableSet=null;
public virtual void SelectAll(){}
public virtual string InsertString(){return "-";} // better implemented via an interface
public virtual void BulkInsert(){}
}// of TableBase ----------------------------------------------------------------------------------
public class RowList<T> : TableBase where T : new(){//---------------------------------------------
public override void SelectAll(){SqlCommand cmd = new SqlCommand("select * from "+TableName,tableSet.conn);
using (SqlDataReader reader = cmd.ExecuteReader()) while (reader.Read()) {Object rb = new T();this.Add(((RowBase)rb).FromReader(reader));}
}
public override string InsertString(){Object o = new T();RowBase rb=((RowBase)o);string s=rb.InsertStringOpen(TableName);int pos=0;foreach (RowBase row in this) s+=((++pos>1)?",":"")+row.InsertStringValuesRow();s+=rb.InsertStringClose();return s; }
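// Copies all rows to the server in a single SqlBulkCopy round trip, using the
// in-memory DataTable built by FilledDataTable().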
public override void BulkInsert(){using (SqlBulkCopy bulkCopy = new SqlBulkCopy(tableSet.conn))// ..........................
{ bulkCopy.DestinationTableName = TableName; //Console.WriteLine("TableName="+TableName+": "+InsertString());
bulkCopy.WriteToServer(FilledDataTable()); // if (TableName=="[cp].[EntityAttributeOfString]") {bulkCopy.BatchSize=100000;bulkCopy.BulkCopyTimeout=3;} occasionally times out; cause unknown
}
}//.........................................................................................................................
private DataTable FilledDataTable() {//............................................................
DataTable table = new DataTable();
Object rb = new T();((RowBase)rb).AddDataTableColumns(table);
foreach (RowBase row in this) table.Rows.Add(row.DataTableRow(table));
return table;
}//............................................................
}//------------------------------------------------------------------------------------------------
public partial class SchemaBase{}//----------------------------------------------------------------
public class TableSet{//---------------------------------------------------------------------------
public TableSet(){AssignTableNames();}
public TableSet(string ServerName,string DatabaseName){this.ServerName=ServerName;this.DatabaseName=DatabaseName;AssignTableNames();
conn=new SqlConnection("Persist Security Info=False;Integrated Security=true;Initial Catalog="+DatabaseName+";server="+ServerName);
}
public void AssignTableNames(){foreach (FieldInfo SchemaField in this.GetType().GetFields()) if (SchemaField.GetValue(this) is SchemaBase)
foreach (FieldInfo TableField in SchemaField.GetValue(this).GetType().GetFields()) if (TableField.GetValue(SchemaField.GetValue(this)) is TableBase)
{ ((TableBase)TableField.GetValue(SchemaField.GetValue(this))).TableName="["+this.DatabaseName+"].["+SchemaField.Name+"].["+TableField.Name+"]";
((TableBase)TableField.GetValue(SchemaField.GetValue(this))).tableSet=this;
}
}
public void LoadAllTables(){conn.Open();
foreach (FieldInfo SchemaField in this.GetType().GetFields()) if (SchemaField.GetValue(this) is SchemaBase)
foreach (FieldInfo TableField in SchemaField.GetValue(this).GetType().GetFields()) if (TableField.GetValue(SchemaField.GetValue(this)) is TableBase)
((TableBase)TableField.GetValue(SchemaField.GetValue(this))).SelectAll();
conn.Close();
}
public string DatabaseName="-";
public string ServerName="-";
public SqlConnection conn=null;
public DbCommand Command(string cmd) {return new SqlCommand (cmd,conn);}
public void ExecuteNonQuery(string sql, bool DoOpenAndClose=false){if (DoOpenAndClose) conn.Open();Command(sql).ExecuteNonQuery();if (DoOpenAndClose) conn.Close();}
public int ExecuteIntegerScalar (string sql, bool DoOpenAndClose=false){if (DoOpenAndClose) conn.Open();var result=Command(sql).ExecuteScalar();int i=(int)result;if (DoOpenAndClose) conn.Close();return i;}
public long ExecuteLongScalar (string sql, bool DoOpenAndClose=false){if (DoOpenAndClose) conn.Open();var result=Command(sql).ExecuteScalar();long i=(long)result;if (DoOpenAndClose) conn.Close();return i;}
}//------------------------------------------------------------------------------------------------
}//of namespace db =================================================================================
<|start_filename|>IfcSharpCore/ifc_in_shared.cs<|end_filename|>
// ifc_in_shared.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
namespace ifc{//==============================
public partial class ENTITY{//==========================================================================================
// public static object Parse2TYPE(string value,Type FieldType)
// public static Type GetGenericType(Type FieldType)
// public static object ParseSelect(string Element,object o)
// ..
}//ENTITY=====================================================================================================================
}// ifc==============================
<|start_filename|>IfcSharpCore/ifc_base_sql.cs<|end_filename|>
// ifc_base_sql.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
namespace ifc{//==============================
enum SqlTable{
_NULL=-2, // BaseTypeGroup: _NULL, NestLevel=-2
SELECT=-1, // BaseTypeGroup: SELECT, NestLevel=-1
EntityAttributeOfVector=0, // BaseTypeGroup: Vector (2d and 3d), NestLevel=0
EntityAttributeOfBinary=1, // BaseTypeGroup: Binary, NestLevel=0
EntityAttributeOfBoolean=2, // BaseTypeGroup: Boolean, NestLevel=0
EntityAttributeOfEntityRef=3, // BaseTypeGroup: EntityRef, NestLevel=0
EntityAttributeOfEnum=4, // BaseTypeGroup: Enum, NestLevel=0
EntityAttributeOfFloat=5, // BaseTypeGroup: Float, NestLevel=0
EntityAttributeOfInteger=6, // BaseTypeGroup: Integer, NestLevel=0
EntityAttributeOfString=7, // BaseTypeGroup: String, NestLevel=0
EntityAttributeOfList=8, // BaseTypeGroup: List1, NestLevel=0
EntityAttributeListElementOfBinary=9, // BaseTypeGroup: Binary, NestLevel=1
EntityAttributeListElementOfEntityRef=10, // BaseTypeGroup: EntityRef, NestLevel=1
EntityAttributeListElementOfFloat=11, // BaseTypeGroup: Float, NestLevel=1
EntityAttributeListElementOfInteger=12, // BaseTypeGroup: Integer, NestLevel=1
EntityAttributeListElementOfString=13, // BaseTypeGroup: String, NestLevel=1
EntityAttributeListElementOfList=14, // BaseTypeGroup: List2, NestLevel=1
EntityAttributeListElementOfListElementOfEntityRef=15, // BaseTypeGroup: EntityRef, NestLevel=2
EntityAttributeListElementOfListElementOfFloat=16, // BaseTypeGroup: Float, NestLevel=2
EntityAttributeListElementOfListElementOfInteger=17, // BaseTypeGroup: Integer, NestLevel=2
}
}// ifc==============================
<|start_filename|>IfcSqlite/ifc_in_sqlite.cs<|end_filename|>
// ifc_in_sqlite.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
//#define _DEBUG
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Reflection;
using System.Xml.Linq;
using System.Xml;
using System.Xml.Serialization;
using NetSystem = System;
using System.IO;
namespace ifc
{
public partial class Model
{
public static Model FromSqliteFile(string fullPath)
{
SQLiteDatabase database = new SQLiteDatabase();
Model CurrentModel = new ifc.Model(fullPath.Replace(".sqlite", ""));
DataSet dataSet = database.GetContentAsDataSet(fullPath);
#if _DEBUG
Console.WriteLine(string.Format("Reading SQLite-File: {0}", NetSystem.IO.Path.GetFileName(fullPath)));
Console.WriteLine("======================================================");
#endif
foreach (DataTable dt in dataSet.Tables)
{
#if _DEBUG
Console.WriteLine("______________________________________________________");
Console.WriteLine(dt.TableName);
foreach (DataColumn c in dt.Columns) Console.Write(string.Format("{0} ", c.ColumnName));
Console.Write("\r\n");
#endif
foreach (DataRow row in dt.Rows)
{
Type entityType = Type.GetType("ifc." + dt.TableName);
ENTITY entityInstance;
if (entityType == typeof(EntityComment))
{
EntityComment ec = new EntityComment((string)row["Comment"], (int)row["PreviousEntity"]);
entityInstance = ec;
}
else
{
object[] ctorArgs = GetEntityConstructorArgs(entityType, row);
entityInstance = Activator.CreateInstance(entityType, ctorArgs) as ENTITY;
}
if (row["Id"] != null) entityInstance.LocalId = (int)row["Id"];
CurrentModel.EntityList.Add(entityInstance);
#if _DEBUG
foreach (DataColumn c in dt.Columns) Console.Write(string.Format("{0} ", row[c] is DBNull ? "NULL" : row[c].ToString()));
Console.Write("\r\n");
#endif
}
}
Console.WriteLine("======================================================");
// before we assign the entities, we need to order the list according to the Ids
CurrentModel.EntityList = CurrentModel.EntityList.OrderBy(e => e.LocalId).ToList();
// then we move each EntityComment to match the actual order,
// since they are being read sequentially
foreach (EntityComment ec in CurrentModel.EntityList.FindAll(e => e.GetType() == typeof(EntityComment)))
{
int oldIndex = CurrentModel.EntityList.FindIndex(e => e.LocalId == ec.LocalId);
int newIndex = CurrentModel.EntityList.FindIndex(e => e.LocalId == ec.PreviousEntityId) + 1;
var item = CurrentModel.EntityList[oldIndex];
CurrentModel.EntityList.RemoveAt(oldIndex);
if (newIndex > oldIndex) newIndex--; // the actual index could have shifted due to the removal
CurrentModel.EntityList.Insert(newIndex, item);
}
CurrentModel.AssignEntities();
return CurrentModel;
}
private static object[] GetEntityConstructorArgs(Type entityType, DataRow dataRow)
{
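            // Builds the constructor argument list by ordinal position: each field
            // tagged with [ifcAttribute] is materialized from the matching DataRow
            // column, with Coordinates/DirectionRatios reassembled from the X/Y/Z columns.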
Dictionary<int, object> fieldDict = new Dictionary<int, object>();
foreach (FieldInfo field in entityType.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy))
{
foreach (Attribute attr in field.GetCustomAttributes(true))
{
if (attr is ifcAttribute ifcAttribute)
{
object o = null;
string fieldName = field.Name.Replace("_", "");
if (fieldName == "Coordinates")
{
List<LengthMeasure> coords = new List<LengthMeasure>();
if (dataRow["X"] != DBNull.Value) coords.Add(new LengthMeasure((double)dataRow["X"]));
if (dataRow["Y"] != DBNull.Value) coords.Add(new LengthMeasure((double)dataRow["Y"]));
if (dataRow["Z"] != DBNull.Value) coords.Add(new LengthMeasure((double)dataRow["Z"]));
o = new List1to3_LengthMeasure(coords.ToArray());
}
else if (fieldName == "DirectionRatios")
{
#if IFC2X3
List<double> coords = new List<double>();
if (dataRow["X"] != DBNull.Value) coords.Add((double)dataRow["X"]);
if (dataRow["Y"] != DBNull.Value) coords.Add((double)dataRow["Y"]);
if (dataRow["Z"] != DBNull.Value) coords.Add((double)dataRow["Z"]);
o = new List2to3_double();
foreach (double c in coords) ((List2to3_double)o).Add(c); // Z is optional, so only add the coordinates that were actually read
#else
List<Real> coords = new List<Real>();
if (dataRow["X"] != DBNull.Value) coords.Add(new Real((double)dataRow["X"]));
if (dataRow["Y"] != DBNull.Value) coords.Add(new Real((double)dataRow["Y"]));
if (dataRow["Z"] != DBNull.Value) coords.Add(new Real((double)dataRow["Z"]));
o = new List2to3_Real(coords.ToArray());
#endif
}
else
{
object value = dataRow[fieldName];
o = GetFieldInstance(field, value);
}
fieldDict.Add(ifcAttribute.OrdinalPosition, o);
}
}
}
fieldDict.Add(fieldDict.Count + 1, dataRow["EndOfLineComment"] is DBNull ? null : dataRow["EndOfLineComment"]);
List<object> args = new List<object>();
foreach (var o in fieldDict.OrderBy(i => i.Key))
args.Add(o.Value);
return args.ToArray();
}
private static object GetFieldInstance(FieldInfo field, object value)
{
Type fieldType = field.FieldType;
object o = null;
if (value == null || value.GetType() == typeof(DBNull)) return null;
else if (fieldType.IsSubclassOf(typeof(Enum))) o = Enum.Parse(fieldType, (string)value);
else if (fieldType.IsSubclassOf(typeof(SELECT))) o = ParseSelect(fieldType, value);
else if (fieldType.IsSubclassOf(typeof(ENTITY)))
{
o = Activator.CreateInstance(fieldType);
if(value != null) ((ENTITY)o).LocalId = (int)value;
}
else if (fieldType.IsSubclassOf(typeof(TypeBase))) o = ParseBaseType(fieldType, value);
else if ((Nullable.GetUnderlyingType(fieldType) != null) && Nullable.GetUnderlyingType(fieldType).IsSubclassOf(typeof(Enum)))
{
if ((string)value != "NULL") o = Enum.Parse(Nullable.GetUnderlyingType(fieldType), (string)value);
}
else if (typeof(ifcListInterface).IsAssignableFrom(fieldType)) o = ParseSTEPList(fieldType, value);
else Console.WriteLine("FieldType: '" + field.FieldType.Name + "' not supported."); //not implemented types
return o;
}
private static object ParseBaseType(Type type, object value)
{
Type baseType = type.BaseType.GetGenericArguments()[0];
if (baseType.BaseType.GetGenericArguments().Length > 0) baseType = baseType.BaseType;
if (value.GetType() == typeof(DBNull) || (baseType == typeof(string) && (string)value == "NULL")) return null;
else return Activator.CreateInstance(type, Convert.ChangeType(value, baseType));
}
private static object ParseSelect(Type selectType, object value)
{
object select = Activator.CreateInstance(selectType);
if (value.GetType() == typeof(string))
{
string s = (string)value;
if (int.TryParse(s, out int id)) ((SELECT)select).Id = id;
else
{
Type valueType = Type.GetType("ifc." + s.Split('|')[0], true, false);
if (valueType.IsSubclassOf(typeof(TypeBase)))
{
object arg = ParseBaseType(valueType, (object)s.Split('|')[1]);
select = Activator.CreateInstance(selectType, arg);
}
}
}
else if (selectType.IsSubclassOf(typeof(TypeBase))) return ParseBaseType(selectType, value);
return select;
}
private static object ParseSTEPList(Type fieldType, object value)
{
//from ifc_in_step.cs: get type of list element
Type genericType;
if (fieldType.BaseType.GetGenericArguments().Length > 0)
genericType = fieldType.BaseType.GetGenericArguments()[0];
else
genericType = fieldType.BaseType.BaseType.GetGenericArguments()[0];
string[] listElements = ((string)value).TrimStart('(').TrimEnd(')').Split(',');
if (listElements.Length == 1 && listElements[0].Length == 0) Console.WriteLine(string.Format("empty list at {0}, {1}", fieldType, value)); // Split never returns an empty array, so test for a single empty token
object[] args = GetStepListArgs(genericType, listElements);
return Activator.CreateInstance(fieldType, args);
}
private static object ParseXMLList(Type fieldType, string xml)
{
//TODO: Implement
return null;
}
private static object[] GetStepListArgs(Type genericType, string[] listElements)
{
// from ifc_in_step.cs
// TODO: refactor
List<object> args = new List<object>();
foreach(string elem in listElements)
{
if (genericType == typeof(Int32))
{
args.Add(Int32.Parse(elem)); // plain integers need no wrapper instance
}
else if (genericType.IsSubclassOf(typeof(TypeBase)))
{
object[] genericCtorArgs = new object[1];
genericCtorArgs[0] = Activator.CreateInstance(genericType); //LengthMeasure or CartesianPoint
Type genericBaseType = genericType.BaseType.GetGenericArguments()[0]; //Double from LengthMeasure -> TYPE<double> -> double
if (genericBaseType == typeof(String)) { if (elem == "$") genericCtorArgs[0] = ""; else genericCtorArgs[0] = ifc.IfcString.Decode(elem); }
else if (genericBaseType == typeof(int)) { genericCtorArgs[0] = int.Parse(elem); }
else if (genericBaseType == typeof(Int32)) { genericCtorArgs[0] = Int32.Parse(elem); }
else if (genericBaseType == typeof(double)) { genericCtorArgs[0] = double.Parse(elem, NetSystem.Globalization.CultureInfo.InvariantCulture); }
args.Add(Activator.CreateInstance(genericType, genericCtorArgs));
}
else if (genericType.IsSubclassOf(typeof(ENTITY)))
{
object o = Activator.CreateInstance(genericType);
((ENTITY)o).LocalId = int.Parse(elem.Trim(' ').Substring(1));
args.Add(o);
}
else if (genericType.IsSubclassOf(typeof(SELECT)))
{
object o = Activator.CreateInstance(genericType);
if ((elem.Length > 0) && elem[0] == '#') { ((SELECT)o).Id = int.Parse(elem.Trim(' ').Substring(1)); }
else
{
int posLpar = elem.IndexOf('(');
int posRpar = elem.Length - 1;//.LastIndexOf(')');
string body = elem.Substring(posLpar + 1, posRpar - posLpar - 1); // extract the argument body
string elementName = elem.Substring(0, posLpar);
try
{
Type t = Type.GetType(elementName, true, true);
if (t.IsSubclassOf(typeof(TypeBase)))
{
object[] genericCtorArgs = new object[1];
if (t.IsSubclassOf(typeof(TYPE<string>))) { if (elem == "$") genericCtorArgs[0] = ""; else genericCtorArgs[0] = ifc.IfcString.Decode(body); }
else if (t.IsSubclassOf(typeof(TYPE<int>))) { genericCtorArgs[0] = int.Parse(body); }
else if (t.IsSubclassOf(typeof(TYPE<Int32>))) { genericCtorArgs[0] = Int32.Parse(body); }
else if (t.IsSubclassOf(typeof(TYPE<double>))) { genericCtorArgs[0] = double.Parse(body, NetSystem.Globalization.CultureInfo.InvariantCulture); }
o = Activator.CreateInstance(t, genericCtorArgs);
}
}
catch (Exception e) { Console.WriteLine(e.Message); }
}
args.Add(o);
}
else { Console.WriteLine("TODO List TYPE: Base=" + genericType.Name + " not supportet."); }//not implemented types
}
return args.ToArray();
}
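// Example (sketch): for genericType == typeof(LengthMeasure) and
// listElements == { "0.", "2.5", "5." }, each element is parsed with the
// invariant culture and wrapped via Activator.CreateInstance(genericType, ...),
// so the caller can build e.g. a List1to3_LengthMeasure from the returned args.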
}
}
<|start_filename|>IfcSqlite/ifc_out_sqlite.cs<|end_filename|>
// ifc_out_sqlite.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
//#define EXPORT_COMPLETE_SCHEMA
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Reflection;
using NetSystem = System;
namespace ifc
{
public partial class ENTITY
{
public virtual void ToSqliteDataSet(ref SQLiteDataSet dataSet, bool updateExisting, int prevEntityId)
{
string paramName = "";
// find corresponding datatable
bool addNewTable = false;
SQLiteDataTable dataTable = dataSet.Tables.FirstOrDefault(t => t.Name == this.GetType().Name);
if (dataTable == null)
{
addNewTable = true;
dataTable = new SQLiteDataTable(this.GetType().Name);
}
// find corresponding datarow
bool addNewRow = false;
SQLiteDataRow dataRow = dataTable.Rows.FirstOrDefault(r => r.Id == this.LocalId || r.IsEmpty);
if (dataRow == null)
{
addNewRow = true;
dataRow = new SQLiteDataRow();
}
else
{
SQLiteDataField idField = dataRow.Fields.FirstOrDefault(f => f.Parameter.ParameterName == "Id");
if (idField != null) idField.Parameter.Value = this.LocalId;
}
if (addNewRow || updateExisting)
{
if (this is CartesianPoint || this is Direction)
{
double X = 0;
double Y = 0;
double? Z = null;
if (this is CartesianPoint cp)
{
if (cp.Coordinates.Count > 1) { X = (double)cp.Coordinates[0]; Y = (double)cp.Coordinates[1]; }
if (cp.Coordinates.Count > 2) { Z = (double)cp.Coordinates[2]; }
}
else if (this is Direction dir)
{
if (dir.DirectionRatios.Count > 1) { X = (double)dir.DirectionRatios[0]; Y = (double)dir.DirectionRatios[1]; }
if (dir.DirectionRatios.Count > 2) { Z = (double)dir.DirectionRatios[2]; }
}
dataRow.Fields.Add(new SQLiteDataField(1, "X", DbType.Double, false, X));
dataRow.Fields.Add(new SQLiteDataField(2, "Y", DbType.Double, false, Y));
dataRow.Fields.Add(new SQLiteDataField(3, "Z", DbType.Double, true, Z));
}
else if (this is EntityComment ec)
{
dataRow.Fields.Add(new SQLiteDataField(1, "Comment", DbType.String, true, ec.CommentLine));
dataRow.Fields.Add(new SQLiteDataField(2, "PreviousEntity", DbType.Int32, false, prevEntityId));
}
else
{
foreach (FieldInfo field in this.GetType().GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy))
{
IEnumerable<ifcAttribute> ifcAttributes = field.GetCustomAttributes(true).Where(a => a is ifcAttribute).Cast<ifcAttribute>();
foreach (ifcAttribute attr in ifcAttributes)
{
object[] fieldAttributes = null;
if (field.FieldType.IsGenericType && field.FieldType.GetGenericArguments()[0].IsEnum && field.FieldType.GetGenericTypeDefinition() == typeof(Nullable<>))
fieldAttributes = field.FieldType.GetGenericArguments()[0].GetCustomAttributes(true);
else
fieldAttributes = field.FieldType.GetCustomAttributes(true);
ifcSqlAttribute sqlAttribute = fieldAttributes.FirstOrDefault(a => a is ifcSqlAttribute) as ifcSqlAttribute;
// each attribute is represented as SQLiteDataField
paramName = field.Name.StartsWith("_") ? field.Name.Substring(1) : field.Name;
if (sqlAttribute != null)
{
SQLiteDataField sqliteField = dataRow.Fields.FirstOrDefault(f => f.Parameter.ParameterName == paramName);
if (sqliteField == null)
{
sqliteField = new SQLiteDataField(attr.OrdinalPosition, paramName, DbTypeFromTableId(sqlAttribute.SqlTableId), attr.optional || attr.derived, SqliteAttributeOut(field, field.GetValue(this)));
dataRow.Fields.Add(sqliteField);
}
else
{
sqliteField.Parameter.Value = SqliteAttributeOut(field, field.GetValue(this));
}
}
}
}
}
}
if (addNewRow)
{
if (dataRow.Fields.Count > 0)
{
dataRow.Fields.Add(new SQLiteDataField(0, "Id", DbType.Int32, false, this.LocalId));
dataRow.Fields.Add(new SQLiteDataField(dataRow.Fields.Count, "EndOfLineComment", DbType.String, true, this.EndOfLineComment));
}
// before we add the row, we sort the values by their ordinal position
dataRow.OrderValuesByOrdinalPosition();
dataTable.Rows.Add(dataRow);
if (addNewTable)
{
dataSet.Tables.Add(dataTable);
}
}
}
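// Design note: CartesianPoint and Direction are flattened into X/Y/Z columns
// (Z nullable for 2D) instead of a serialized coordinate list, which keeps the
// geometry directly queryable with plain SQL; the SQLite import mirrors this
// in its coordinate branch when reconstructing the entities.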
public object SqliteAttributeOut(FieldInfo field, object o)
{
if (o == null) return DBNull.Value;
else
{
if (o is Enum) return o.ToString();
else if (o is string) return o.ToString();
else if (o is SELECT select)
{
if (select.IsNull) return "NULL";
else if (select.SelectType().IsSubclassOf(typeof(TypeBase))) return select.SelectType().Name + "|" + select.SelectValue().ToString();
else return SqliteAttributeOut(field, select.SelectValue());
}
else if (o.GetType().IsSubclassOf(typeof(TypeBase))) return ((TypeBase)o).ToSqliteValue();
else if (typeof(ifcListInterface).IsAssignableFrom(o.GetType()))
{
//TODO: Implement "ToXML"
return o.ToString();
}
else return o.ToString().Replace("#", "");
}
}
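// Design note: SELECT values wrapping a defined type are stored as
// "TypeName|value" in a single text column; the SQLite import's ParseSelect
// splits on '|' to reconstruct the concrete type from this pair.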
public static DbType DbTypeFromTableId(int id)
{
switch (id)
{
case (int)SqlTable.EntityAttributeOfBinary: return DbType.Binary;
case (int)SqlTable.EntityAttributeOfBoolean: return DbType.Boolean;
case (int)SqlTable.EntityAttributeOfEntityRef: return DbType.Int32;
case (int)SqlTable.EntityAttributeOfFloat: return DbType.Double;
case (int)SqlTable.EntityAttributeOfInteger: return DbType.Int32;
default: return DbType.String;
}
}
#region DataSet-Interface
public static Type TypeFromTableId(int id)
{
switch (id)
{
case (int)SqlTable.EntityAttributeOfBinary: return typeof(byte);
case (int)SqlTable.EntityAttributeOfBoolean: return typeof(bool);
case (int)SqlTable.EntityAttributeOfEntityRef: return typeof(int);
case (int)SqlTable.EntityAttributeOfFloat: return typeof(double);
case (int)SqlTable.EntityAttributeOfInteger: return typeof(int);
default: return typeof(string);
}
}
public virtual void ToDataSet(ref DataSet dataSet)
{
// find corresponding datatable
DataTable dataTable = dataSet.Tables[this.GetType().Name];
if (dataTable == null) dataTable = new DataTable(this.GetType().Name);
DataColumn dataColumn;
DataRow dataRow = dataTable.NewRow();
foreach (FieldInfo field in this.GetType().GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy))
{
foreach (Attribute attr in field.GetCustomAttributes(true))
{
if (attr is ifcAttribute ifcAttr)
{
string columnName = field.Name.StartsWith("_") ? field.Name.Substring(1) : field.Name;
object value = SqliteAttributeOut(field, field.GetValue(this));
ifcSqlAttribute sqlAttribute = field.FieldType.GetCustomAttributes(true).FirstOrDefault(a => a is ifcSqlAttribute) as ifcSqlAttribute;
dataColumn = dataTable.Columns[columnName];
if (dataColumn == null)
{
dataColumn = new DataColumn(columnName, sqlAttribute == null ? typeof(string) : TypeFromTableId(sqlAttribute.SqlTableId));
dataTable.Columns.Add(dataColumn);
dataColumn.AllowDBNull = ifcAttr.optional;
}
dataRow.SetField(dataColumn, value);
}
}
}
dataColumn = dataTable.Columns["Id"];
if (dataColumn == null)
{
dataColumn = new DataColumn("Id", typeof(int));
dataTable.Columns.Add(dataColumn);
}
dataRow.SetField(dataColumn, this.LocalId);
// before we add the row, we sort the values by their ordinal position
foreach (FieldInfo field in this.GetType().GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy))
{
foreach (Attribute attr in field.GetCustomAttributes(true))
{
if (attr is ifcAttribute ifcAttr)
{
string columnName = field.Name.StartsWith("_") ? field.Name.Substring(1) : field.Name;
dataColumn = dataTable.Columns[columnName];
dataColumn.SetOrdinal(ifcAttr.OrdinalPosition);
}
}
}
dataTable.Rows.Add(dataRow);
if (dataSet.Tables[dataTable.TableName] == null)
{
dataSet.Tables.Add(dataTable);
}
}
#endregion
}
public partial class EntityComment : ENTITY
{
public int PreviousEntityId;
public EntityComment(string CommentLine, int PreviousEntityId) : this(CommentLine)
{
this.PreviousEntityId = PreviousEntityId;
}
}
public partial class TYPE<T> : TypeBase
{
public override object ToSqliteValue()
{
if (IsNull) return DBNull.Value;
else if (typeof(T).Equals(typeof(double))) return ((double)(object)TypeValue).ToString("0.0000000000", NetSystem.Globalization.CultureInfo.InvariantCulture).TrimEnd('0');
else return TypeValue.ToString();
}
}
public partial class LIST<T> : List<T>, ifcListInterface, ifcSqlTypeInterface
{
public string ToXML() { return ""; }
}
public partial class Model
{
public void ToSqliteFile()
{
AssignEntities();
string fullPath = Header.name + ".sqlite";
SQLiteDataSet sqliteDataSet = new SQLiteDataSet();
#if EXPORT_COMPLETE_SCHEMA
BuildIfcDataSet(ref sqliteDataSet);
#endif
Console.WriteLine(string.Format("{0}: Exporting Entities to SQLite-File", NetSystem.DateTime.Now.ToString("HH:mm:ss.ffff")));
int prevEntityId = 0;
for (int i = 0; i < ifc.Repository.CurrentModel.EntityList.Count; i++)
{
ENTITY e = ifc.Repository.CurrentModel.EntityList[i];
if (e is ifc.Root) if (((ifc.Root)e).GlobalId == null) ((ifc.Root)e).GlobalId = ifc.GloballyUniqueId.NewId();
e.ToSqliteDataSet(ref sqliteDataSet, true, prevEntityId);
prevEntityId = e.LocalId;
}
//TODO: Check if custom DataSet Class 'SQLiteDataSet' can be omitted and instead use a regular 'DataSet'
//DataSet dataSet = new DataSet("IfcDataSet");
//foreach (ENTITY e in ENTITY.EntityList)
//{
// e.ToDataSet(ref dataSet);
//}
SQLiteDatabase database = new SQLiteDatabase(fullPath);
database.FillFromDataSet(sqliteDataSet);
Console.WriteLine(string.Format("{0}: Finished Export", NetSystem.DateTime.Now.ToString("HH:mm:ss.ffff")));
Console.WriteLine("======================================================");
}
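// Usage sketch ("example" is a hypothetical model name; the file name is
// always derived from Header.name):
//   ifc.Repository.CurrentModel.Header.name = "example";
//   /* ...create entities... */
//   ifc.Repository.CurrentModel.ToSqliteFile(); // writes "example.sqlite"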
private bool BuildIfcDataSet(ref SQLiteDataSet ifcDataSet)
{
if (ifcDataSet == null)
return false;
SQLiteDataField sqliteField;
string paramName;
foreach (Type t in Assembly.GetAssembly(typeof(ifc.ENTITY)).GetTypes())
{
if (t.IsClass)
{
if (!t.IsAbstract)
{
if (t.IsSubclassOf(typeof(ifc.ENTITY)))
{
SQLiteDataTable dataTable = new SQLiteDataTable(t.Name);
SQLiteDataRow dataRow = new SQLiteDataRow();
foreach (FieldInfo field in t.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy))
{
foreach (Attribute attr in field.GetCustomAttributes(true))
{
if (attr is ifcAttribute ifcAttribute)
{
object[] fieldAttributes = null;
if ((field.FieldType.IsGenericType) && (field.FieldType.GetGenericTypeDefinition() == typeof(Nullable<>)) && (field.FieldType.GetGenericArguments()[0].IsEnum))
{
fieldAttributes = field.FieldType.GetGenericArguments()[0].GetCustomAttributes(true);
}
else
{
fieldAttributes = field.FieldType.GetCustomAttributes(true);
}
if (null != fieldAttributes)
{
foreach (Attribute attr2 in fieldAttributes)
{
if (attr2 is ifc.ifcSqlAttribute sqlAttribute)
{
paramName = field.Name.StartsWith("_") ? field.Name.Substring(1) : field.Name;
sqliteField = new SQLiteDataField(ifcAttribute.OrdinalPosition, paramName, ENTITY.DbTypeFromTableId(sqlAttribute.SqlTableId));
sqliteField.Parameter.IsNullable = ifcAttribute.optional || ifcAttribute.derived;
dataRow.Fields.Add(sqliteField);
}
}
}
}
}
}
dataRow.Fields.Add(new SQLiteDataField(0, "Id", DbType.Int32));
dataRow.Fields.Add(new SQLiteDataField(dataRow.Fields.Count, "EndOfLineComment", DbType.String));
// before we add the row, we sort the values by their ordinal position
dataRow.OrderValuesByOrdinalPosition();
dataTable.Rows.Add(dataRow);
ifcDataSet.Tables.Add(dataTable);
}
}
}
}
return true;
}
}
}
<|start_filename|>IfcSharpCore/ifc_out_cs.cs<|end_filename|>
// ifc_out_cs.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using NetSystem=System;
using System.Collections.Generic;
using System.Collections;
using System.Globalization;
using System.IO;
using Threading=System.Threading;
using System.Reflection;
using System.Text;
using System.Xml;
namespace ifc{//==============================
public partial class ENTITY{//==========================================================================================
protected static int cnt=0;
public string EntityVarName(int Id,Model CurrentModel){try{if (Id>0) return CurrentModel.EntityDict[Id].ShortTypeName()+Id.ToString(); else return "comment"+ NetSystem.Math.Abs(Id);}catch(NetSystem.Exception ex){throw new NetSystem.Exception(ex.Message+": Id="+Id);} }
public string CsOut(object o,Model CurrentModel){
string s="";
cnt++;
if (o==null) s="null";
else if (o is NetSystem.Enum) {if (o.ToString()=="_NULL") s= "ifc."+o.GetType().Name+"._NULL"; else s="ifc."+o.GetType().Name+"."+o.ToString();}
else if (o is SELECT) { if (((SELECT)o).Id>0) s="new ifc."+o.GetType().Name+"("+EntityVarName(((SELECT)o).Id,CurrentModel)+")";
else if( ((SELECT)o).SelectValue()==null ) s="null";
else s="new ifc."+o.GetType().Name+"(("+((SELECT)o).SelectType()+")"+((SELECT)o).SelectValue().ToString().Replace('\'','"')+")";
}
else if (o is ENTITY) {if (((ENTITY)o).LocalId>0) s=EntityVarName(((ENTITY)o).LocalId,CurrentModel); else s="null";}
else if (o is TypeBase) {TypeBase tb=(TypeBase)o;
if (o is ifc.Logical) {s="(ifc.Logical)";if (o.ToString()=="False") s+="false"; else s+="true";} else
if (o is ifc.Boolean) {s="(ifc.Boolean)";if (o.ToString()=="False") s+="false"; else s+="true";} else
if (o is ifc.GloballyUniqueId) s="ifc.GloballyUniqueId.NewId() /*\""+o.ToString()+"\"*/"; else
if (tb.IsNull) s="null"; else
{if (tb.GetBaseType()==typeof(NetSystem.String)) {if ( ((TypeBase)o).IsNull /*.ToString()==""*/) s="null";else s="new ifc."+o.GetType().Name+"("+o.ToString().Replace('\'','"') +")"; }
else if( typeof(IEnumerable).IsAssignableFrom(tb.GetBaseType()))
{// the list-typed value renders its own elements via ToString()
s="new ifc."+o.GetType().Name+o.ToString();
}
else s="(ifc."+o.GetType().Name+")("+o.ToString()+")";
}
}
else if (o is NetSystem.String) {if (o.ToString()=="") s="null";else s="\""+o.ToString()+"\"";}
else if( typeof(IEnumerable).IsAssignableFrom(o.GetType()))
{bool TypeDisplay=(this.GetType()!=typeof(ifc.CartesianPoint) ) && (this.GetType()!=typeof(ifc.Direction) );
if (TypeDisplay) s+="new "+o.GetType().ToString().Replace("`1[","<").Replace("]",">")+"(";
int pos=0;
foreach (object item in (IEnumerable)o) if (item!=null)
{pos++;if (pos>1) s+=",";
if (item is ENTITY) s+=EntityVarName(((ENTITY)item).LocalId,CurrentModel);
else if (item is SELECT) {if (((SELECT)item).Id>0) s+="new ifc."+item.GetType().Name+"("+EntityVarName(((SELECT)item).Id,CurrentModel)+")"; else s+=CsOut(((SELECT)item).SelectValue(),CurrentModel);}
else if (item is TypeBase) {TypeBase tb=(TypeBase)item;
if (tb.GetBaseType()==typeof(NetSystem.String)) {if (item.ToString()=="") s+=""; /* null */else s+="(ifc."+item.GetType().Name+")"+item.ToString().Replace('\'','"').Replace("\"\"","\""); }
else if( typeof(IEnumerable).IsAssignableFrom(tb.GetBaseType())) {s+="new ifc."+item.GetType().Name+item.ToString();}
else {if (TypeDisplay) s+="(ifc."+item.GetType().Name+")("+item.ToString()+")"; else s+=item.ToString(); }
}
else if (item is double ) {s+=item.ToString(); }
else throw new ifc.Exception("CsOut: unknown enumerable-type");
}
if (TypeDisplay) s+=")";
}
else s=o.ToString();
return s;
}
public virtual string ToCs(Model CurrentModel){
Threading.Thread.CurrentThread.CurrentCulture=CultureInfo.InvariantCulture;
string s="";
string ElementName=this.GetType().ToString();//.Replace("IFC4","ifc");
int ElementNameMaxSize=35;
if (ElementName.Length<ElementNameMaxSize) ElementName+=new string(' ',ElementNameMaxSize-ElementName.Length);
int IdStringMaxSize=ElementNameMaxSize+4;
string IdString=EntityVarName(this.LocalId,CurrentModel);
if (IdString.Length<IdStringMaxSize) IdString+=new string(' ',IdStringMaxSize-IdString.Length);
if (this is ifc.EntityComment) s=new string(' ',IdStringMaxSize)+" new "+ElementName+"(";
else s="var "+IdString+"=new "+ElementName+"(";
int VarInsert=s.Length;
if (VarInsert<4) VarInsert=4;
if (VarInsert>3) VarInsert-=3;
bool CR=true;
if (this.GetType()==typeof(ifc.CartesianPoint) )
{//-------------------------------------------------------------------------------------------
ifc.CartesianPoint p=(ifc.CartesianPoint)this; CR=false;
s+="x:"+p.x+",y:"+p.y;if (p.Coordinates.Count>2) s+=",z:"+p.z;
}//-------------------------------------------------------------------------------------------
else
if ( this.GetType()==typeof(ifc.Direction) )
{//-------------------------------------------------------------------------------------------
ifc.Direction p=(ifc.Direction)this; CR=false;
s+="x:"+p.x+",y:"+p.y;if (p.DirectionRatios.Count>2) s+=",z:"+p.z;
}//-------------------------------------------------------------------------------------------
else {//-------------------------------------------------------------------------------------------
AttribListType AttribList=TypeDictionary.GetComponents(this.GetType()).AttribList;
int sep=0;foreach (AttribInfo attrib in AttribList) {//bool optional=((ifcAttribute)field.GetCustomAttributes(inherit:(true))[0]).optional;
bool Cmt=attrib.optional;if (CsOut(attrib.field.GetValue(this),CurrentModel)!="null") Cmt=false;
s+=((++sep>1)?"\r\n"+new string(' ',VarInsert)+(Cmt?"//,":" ,"):"")+attrib.field.Name+":"+CsOut(attrib.field.GetValue(this),CurrentModel);
s+="// #"+attrib.OrdinalPosition;//((ifcAttribute)field.GetCustomAttributes(inherit:(true))[0]).OrdinalPosition;
if (attrib.optional) s+=" [optional]";
if (Cmt) s+=" ("+attrib.field.FieldType.ToString()+")" ;
}
}//-------------------------------------------------------------------------------------------
if (this.EndOfLineComment!=null) if (this.EndOfLineComment.Length>0) s+= "\r\n"+new string(' ',VarInsert)+" ,EndOfLineComment:"+"\""+this.EndOfLineComment+"\"";
if (this is ifc.EntityComment) {s+="\""+((ifc.EntityComment)this).CommentLine.TrimEnd(' ')+'"';CR=false;}
if (CR) s+="\r\n"+new string(' ',VarInsert+2);
return s+=");";// //#"+(this.SortPos);
}
public static Dictionary<int,string> IdVarNameDict=new Dictionary<int,string>(); // contains the assignment of Entity-Ids to variable-names
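// Reads an XML dump of a PDB file and fills IdVarNameDict with the
// ilIndex -> local-variable-name pairs found in the scopes of the given
// method, so generated code can reuse the original variable names;
// finally re-assigns the entities of the current model.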
public static void EvalPdbXml(string PdbFileName,string MethodName,int IdOffset=0)
{
XmlDocument doc=new XmlDocument();
doc.Load(PdbFileName);
foreach (XmlNode node1 in doc.ChildNodes)
{if (node1 is XmlElement) if (node1.Name=="SymbolData")
{foreach (XmlNode node2 in node1.ChildNodes) if (node2 is XmlElement) if (node2.Name=="method") if (node2.Attributes["name"].Value==MethodName)
{foreach (XmlNode node3 in node2.ChildNodes) if (node3 is XmlElement) if (node3.Name=="rootScope")
{foreach (XmlNode node4 in node3.ChildNodes) if (node4 is XmlElement) if (node4.Name=="scope")
foreach (XmlNode node5 in node4.ChildNodes) if (node5 is XmlElement) if (node5.Name=="local") IdVarNameDict.Add(int.Parse(node5.Attributes["ilIndex"].Value),node5.Attributes["name"].Value); // Console.WriteLine(int.Parse(node5.Attributes["ilIndex"].Value)+": "+ node5.Attributes["name"].Value);
}
}
}
}
ifc.Repository.CurrentModel.AssignEntities();
}
}// of ENTITY =========================================================================================================
public partial class Model{//==========================================================================================
public void ToCsFile(string FileName=null)
{
AssignEntities();
SortEntities();
if (FileName==null) FileName=Header.name;
StreamWriter sw=new StreamWriter(FileName+".cs",false,Encoding.Default);
sw.WriteLine("");
sw.WriteLine("// CAUTION! THIS IS A GENERATED FILE! IT WILL BE OVERWRITTEN AT ANY TIME! ");
sw.WriteLine(@"// created with https://github.com/IfcSharp");
sw.WriteLine("");
sw.WriteLine("public class ifcOut{ public static void Generated(){ // ##########################################");
sw.WriteLine("");
sw.WriteLine("ifc.Repository.CurrentModel.ClearEntityList();");
sw.WriteLine("ifc.Repository.CurrentModel.Header.name=\"generated_from_IfcSharp_ifc_Model_ToCsFile()\";");
if (AssignedEntityDict==null) throw new ifc.Exception("AssignedEntityDict is not initialized");
foreach (KeyValuePair<int,ENTITY> kvp in AssignedEntityDict) sw.WriteLine(kvp.Value.ToCs(CurrentModel:this));
sw.WriteLine("");
sw.WriteLine("ifc.Repository.CurrentModel.ToStepFile();");
sw.WriteLine("}/* of void */ } // of class #####################################################################");
sw.WriteLine("");
sw.Close();
}// of ToCsFile
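// Usage sketch ("roundtrip" is a hypothetical file name): ToCsFile() emits a
// C# program that rebuilds the same entity graph and ends with a call to
// ToStepFile(), so compiling and running the generated file reproduces the
// STEP export; with no argument the file name falls back to Header.name:
//   ifc.Repository.CurrentModel.ToCsFile("roundtrip"); // writes "roundtrip.cs"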
}// of Model ==========================================================================================================
}// ifc==============================
<|start_filename|>IfcSql/sql_schema_for_ifcSQL_Instance.bat<|end_filename|>
sqlcmd -S%SqlServer% -dSchemaEvaluation -Q"EXECUTE [SchemaEvaluation].[dbo].[print_CS] 'ifcSQL',9,'ifc_in_out_sql','_ifcSQL_for_ifcSQL_instance','ifcSQL'" > ifcSQL_for_ifcSQL_Instance_db_generated.cs
pause
<|start_filename|>IfcSharpCore/ifc_sort.cs<|end_filename|>
// ifc_sort.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using NetSystem=System;
using System.Collections.Generic;
using System.Collections;
using System.Reflection;
namespace ifc{//==============================
public partial class ENTITY{//==========================================================================================
public bool IsAssigned=false; // true = all entity references of this entity were defined earlier
public int SortPos=0;
}// of ENTITY =========================================================================================================
public partial class Model{//==========================================================================================
private static Dictionary<int,ENTITY> AssignedEntityDict=new Dictionary<int,ENTITY>();
private static int GlobalSortPos=0;
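// One pass of the dependency sort: marks every entity as assigned whose
// referenced entities (direct, via SELECT, or inside lists) are already in
// AssignedEntityDict and returns the number of entities newly assigned;
// SortEntities() repeats this until a pass assigns nothing more.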
public int SetAssignedEntityForSort()
{
int cnt=0;
foreach (ENTITY e in EntityList) if (!e.IsAssigned)
{e.IsAssigned=true;
//if (e.Id== 103) Console.WriteLine(e.ToIfc());
foreach (FieldInfo field in e.GetType().GetFields(BindingFlags.Public|BindingFlags.Instance|BindingFlags.FlattenHierarchy))
foreach (NetSystem.Attribute attr in field.GetCustomAttributes(true)) if (attr is ifcAttribute) // IFC attributes only
{if (field.FieldType.IsSubclassOf(typeof(ENTITY)))
{ENTITY E=(ENTITY)field.GetValue(e);
// if (E!=null) if (E.Id>0) Console.WriteLine(field.Name+"="+E.Id+" "+E.IsAssigned+" "+AssignedEntityDict.ContainsKey(E.Id));
if (E!=null) if (E.LocalId>0) if (!AssignedEntityDict.ContainsKey(E.LocalId)) e.IsAssigned=false;
// if (e.Id==1) Console.WriteLine(field.Name+" "+e.IsAssigned);
}
else if (field.FieldType.IsSubclassOf(typeof(SELECT)))
{SELECT E=(SELECT)field.GetValue(e);
if (E!=null) if (E.Id>0) if (!AssignedEntityDict.ContainsKey(E.Id)) e.IsAssigned=false;
}
else if( typeof(IEnumerable).IsAssignableFrom(field.FieldType)) if (field.GetValue(e)!=null)
foreach (object item in (IEnumerable)field.GetValue(e)) if (item!=null)
{ if (item is SELECT) if (((SELECT)item).Id>0) if (!AssignedEntityDict.ContainsKey(((SELECT)item).Id)) e.IsAssigned=false;
if (item is ENTITY) if (((ENTITY)item).LocalId>0) if (!AssignedEntityDict.ContainsKey(((ENTITY)item).LocalId)) e.IsAssigned=false;
}
}// of foreach field
// if (e.Id==1)
// Console.WriteLine(e.Id+" IsAssigned="+e.IsAssigned);
if (e.IsAssigned) /* if (!(e is ifc.EntityComment)) */ {e.SortPos=++GlobalSortPos;try{AssignedEntityDict.Add(e.LocalId,e);}catch(NetSystem.Exception ex){throw new NetSystem.Exception(ex.Message+e.ToStepLine());};cnt++;}//Console.WriteLine(cnt+": "+e.Id);}
}//of foreach Entity
//Console.WriteLine("----------------------");
return cnt;
}
public void SortEntities()
{
AssignedEntityDict.Clear();
foreach (ENTITY e in EntityList) e.IsAssigned=false;
GlobalSortPos=0;
int cnt=0; do{cnt=SetAssignedEntityForSort();} while (cnt>0);
bool FirstDisplay=true;
foreach (ENTITY e in EntityList) if (!e.IsAssigned) if (FirstDisplay) {NetSystem.Console.WriteLine("ifc.Model.SortEntities: NOT ASSIGNED: "+e.ToStepLine());FirstDisplay=false;};
//TODO: better to throw an exception here instead of only logging the first unassigned entity
}//of void
}//=====================================================================================================================
}//ifc==============================
<|start_filename|>IfcSharpCore/ifc_base_geometry.cs<|end_filename|>
// ifc_base_geometry.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
namespace ifc{//==============================
public partial class CartesianPoint:Point{//============================================================================
public CartesianPoint(double x, double y, double z,string EndOfLineComment=null):base(){AddNext();this.Coordinates=new List1to3_LengthMeasure((LengthMeasure) x,(LengthMeasure) y,(LengthMeasure) z);this.EndOfLineComment= EndOfLineComment;}
public CartesianPoint(double x, double y ,string EndOfLineComment=null):base(){AddNext();this.Coordinates=new List1to3_LengthMeasure((LengthMeasure) x,(LengthMeasure) y); this.EndOfLineComment= EndOfLineComment;}
public CartesianPoint(CartesianPoint p ):base(){AddNext();this.Coordinates=new List1to3_LengthMeasure((LengthMeasure)p.x,(LengthMeasure)p.y,(LengthMeasure)p.z);this.EndOfLineComment=p.EndOfLineComment;} // issue: need for distinction 2D/3D here
public double x {get{return this.Coordinates[0].TypeValue;}}
public double y {get{return this.Coordinates[1].TypeValue;}}
public double z {get{return this.Coordinates[2].TypeValue;}}
public static CartesianPoint operator - (CartesianPoint p) {return new CartesianPoint( -p.x, -p.y, -p.z);}// issue: need for distinction 2D/3D here
public CartesianPoint Add (double x, double y, double z) {return new CartesianPoint(this.x+x,this.y+y,this.z+z);}// issue: need for distinction 2D/3D here
}//=====================================================================================================================
#if !IFC2X3
public partial class Direction:GeometricRepresentationItem{//===========================================================
public Direction(double x, double y, double z,string EndOfLineComment=null):base(){AddNext();this.DirectionRatios=new List2to3_Real((Real) x,(Real) y,(Real) z);this.EndOfLineComment= EndOfLineComment;}
public Direction(double x, double y ,string EndOfLineComment=null):base(){AddNext();this.DirectionRatios=new List2to3_Real((Real) x,(Real) y); this.EndOfLineComment= EndOfLineComment;}
public Direction(Direction d ):base(){AddNext();this.DirectionRatios=new List2to3_Real((Real)d.x,(Real)d.y,(Real)d.z);this.EndOfLineComment=d.EndOfLineComment;} // issue: need for distinction 2D/3D here
public double x {get{return this.DirectionRatios[0].TypeValue;}}
public double y {get{return this.DirectionRatios[1].TypeValue;}}
public double z {get{return this.DirectionRatios[2].TypeValue;}}
public static Direction operator - (Direction d ) {return new Direction(-d.x,-d.y,-d.z); }// issue: need for distinction 2D/3D here
public static Direction operator + (Direction d1,Direction d2) {return new Direction(d1.x+d2.x,d1.y+d2.y,d1.z+d2.z);}// issue: need for distinction 2D/3D here
public static Direction operator - (Direction d1,Direction d2) {return new Direction(d1.x-d2.x,d1.y-d2.y,d1.z-d2.z);}// issue: need for distinction 2D/3D here
public static Direction operator * (Direction d, double Scale) {return new Direction(d.x*Scale,d.y*Scale,d.z*Scale);}// issue: need for distinction 2D/3D here
}//=====================================================================================================================
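// Example (sketch): the operators above provide component-wise vector arithmetic,
//   var d = new ifc.Direction(x:1, y:0, z:0) + new ifc.Direction(x:0, y:1, z:0);
//   var half = d * 0.5; // DirectionRatios (0.5, 0.5, 0.0)
// note that every operation allocates a new Direction, which AddNext() also
// registers in the current model's EntityList.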
#endif
public partial class Axis2Placement3D:Placement{//======================================================================
public Axis2Placement3D(Axis2Placement3D template,string EndOfLineComment=null):base(){AddNext();this.Location=template.Location;this.Axis=template.Axis;this.RefDirection=template.RefDirection;this.EndOfLineComment=EndOfLineComment;}
public Axis2Placement3D Clone(CartesianPoint p,string EndOfLineComment=null) {return new Axis2Placement3D(Location:p,Axis:this.Axis,RefDirection:this.RefDirection,EndOfLineComment:EndOfLineComment);}
}//=====================================================================================================================
/*
public partial class CompoundPlaneAngleMeasure :List3to4<int>{
public CompoundPlaneAngleMeasure (params int[] items):base() {foreach (int e in items) this.Add((int)e);} // INTEGER
}
*/
}// ifc==============================
<|start_filename|>IfcSharpCore/ifc_base_entity.cs<|end_filename|>
// ifc_base_entity.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using NetSystem=System;
public class ifcAttribute:System.Attribute{public ifcAttribute(int OrdinalPosition,bool optional=false,bool derived=false){this.OrdinalPosition=OrdinalPosition;this.optional=optional;this.derived=derived;}
public int OrdinalPosition=0;
public bool optional=false; // ---> $
public bool derived=false; // ToDo ---> *
}
public class ifcInverseAttribute:System.Attribute{public ifcInverseAttribute(string For){this.For=For;}
public string For="";
}
namespace ifc{//==============================
public partial class ENTITY:ifcSqlType{//==========================================================================================
public ENTITY(){}
public int LocalId=0;
public string IfcId() => (this.LocalId==0)?"*":"#"+this.LocalId;
public override string ToString() => IfcId();
public string ShortTypeName() => this.GetType().ToString().Replace("IFC4","ifc").Replace("ifc.","");
//public int SqlTypeId() => ((ifc.ifcSqlAttribute)this.GetType().GetCustomAttributes(true)[0]).SqlTypeId;
public string EndOfLineComment=null;
public long ifcSqlGlobalId=0;
public virtual void AssignInverseElements(){}
}//=====================================================================================================================
// static:
public partial class ENTITY:ifcSqlType{//==========================================================================================
static public int NextGlobalId=1;
protected void AddNext(){LocalId=NextGlobalId++;/*NetSystem.Console.WriteLine(this.ToStepLine());*/ Repository.CurrentModel.EntityList.Add(this);}
protected virtual void CheckValues(){}
protected virtual void SetDefaultValues(){}
static public int NextGlobalCommentId=0;
protected void AddNextCommentLine(){LocalId=NextGlobalCommentId--;Repository.CurrentModel.EntityList.Add(this);}
}//=====================================================================================================================
[ifcSql(TypeGroupId:5,TypeId:-1)] public partial class EntityComment:ENTITY{//==========================================================================================
public EntityComment(){}
public EntityComment(string CommentLine){AddNextCommentLine();this.CommentLine=CommentLine;if (this.CommentLine.Length<74) this.CommentLine+=new string(' ',74-this.CommentLine.Length);}
public EntityComment(string CommentLine,char FrameChar){this.CommentLine=CommentLine;if (this.CommentLine.Length<74) this.CommentLine+=new string(' ',74-this.CommentLine.Length);
new EntityComment(new string(' ',74));
new EntityComment(new string(FrameChar,74));
AddNextCommentLine();
new EntityComment(new string(FrameChar,74));
new EntityComment(new string(' ',74));
}
public string CommentLine="no comment";
public override string ToString(){return CommentLine;}
public static int HtmlCnt=0;
}//=====================================================================================================================
#if !IFC2X3
public partial class CartesianTransformationOperator3DnonUniform:CartesianTransformationOperator3D{//===================
protected override void CheckValues(){} // Check >0
protected override void SetDefaultValues(){
if (Scale==null) Scale=new ifc.Real(1);
if (Scale2==null) Scale2=Scale;
if (Scale3==null) Scale3=Scale;
if (Axis1==null) Axis1=new ifc.Direction(x:1,y:0,z:0);
if (Axis2==null) Axis2=new ifc.Direction(x:0,y:1,z:0);
if (Axis3==null) Axis3=new ifc.Direction(x:0,y:0,z:1);
}
}//=====================================================================================================================
#endif
}// ifc=======================================
<|start_filename|>IfcSqlite/ifc_sqlite_database.cs<|end_filename|>
// ifc_sqlite_database.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SQLite;
using System.Diagnostics;
using System.IO;
using System.Linq;
using NetSystem = System;
namespace ifc
{
class SQLiteDatabase : IDisposable
{
private SQLiteConnection Connection = null;
private SQLiteTransaction ActiveTransaction = null;
public SQLiteDatabase()
{
}
public SQLiteDatabase(string fullPath)
{
Console.WriteLine(string.Format("{0}: Creating Database '{1}'", NetSystem.DateTime.Now.ToString("HH:mm:ss.ffff"), fullPath));
InitDatabase(fullPath);
}
public void ConnectToDatabase(string fullPath)
{
if (this.Connection != null)
{
CloseConnection();
this.Connection = null;
}
string connection = string.Format("Data Source={0};Version=3;", fullPath);
this.Connection = new SQLiteConnection(connection);
}
public int InitDatabase(string fullPath)
{
// if the file already exists, check if it's in use
if (File.Exists(fullPath))
{
if (IsFileLocked(fullPath))
{
string msg = "The SQLite-File is locked.\nChange the Filename or close the locking application and try again.";
throw new IOException(msg);
}
}
// create the database-file
try
{
SQLiteConnection.CreateFile(fullPath);
// connect to the database and open the connection
string connection = string.Format("Data Source={0};Version=3;", fullPath);
this.Connection = new SQLiteConnection(connection);
//this.Connection.Open();
//// due to the m:n relation of certain tables
//// we need to turn on FOREIGN-Keys
//SQLiteCommand command = new SQLiteCommand(this.Connection);
//BeginTransaction();
//command.CommandText = "PRAGMA foreign_keys = ON;";
//command.ExecuteNonQuery();
//CommitTransaction();
//CloseConnection();
}
catch (Exception e)
{
Debug.WriteLine(e.Message + "\n" + e.StackTrace);
CancelChanges();
CloseConnection();
return 0;
}
return 1;
}
public static bool IsFileLocked(string filePath)
{
FileStream stream = null;
FileInfo fileInfo = new FileInfo(filePath);
try
{
stream = fileInfo.Open(FileMode.Open, FileAccess.Read, FileShare.None);
}
catch (IOException)
{
//the file is unavailable because it is:
//still being written to
//or being processed by another thread
//or does not exist (has already been processed)
return true;
}
finally
{
if (stream != null)
stream.Close();
}
//file is not locked
return false;
}
public void Dispose()
{
this.Connection.Close();
this.Connection.Dispose();
}
/// <summary>
/// Clears the table.
/// </summary>
/// <param name="tableName">Name of the table.</param>
/// <returns></returns>
public int TruncateTable(string tableName)
{
if (this.Connection.State != NetSystem.Data.ConnectionState.Open)
{
this.Connection.Open();
}
int returnValue = 0;
SQLiteCommand command = new SQLiteCommand(string.Format("DELETE FROM {0};", tableName), this.Connection);
try
{
returnValue = command.ExecuteNonQuery();
}
catch (SQLiteException sqle)
{
// Handle DB exception
if (sqle.ErrorCode != (int)SQLiteErrorCode.Error)
Debug.WriteLine(sqle.ResultCode.ToString());
}
finally
{
this.Connection.Close();
}
return returnValue;
}
public void FillFromDataSet(SQLiteDataSet dataSet)
{
Console.WriteLine(string.Format("{0}: Filling Database", NetSystem.DateTime.Now.ToString("HH:mm:ss.ffff")));
try
{
OpenConnection();
BeginTransaction();
foreach (SQLiteDataTable table in dataSet.Tables)
{
// first, create the table with its columns
CreateTable(table);
// second, add all entries from the DatabaseTable-Object
InsertTable(table);
}
CommitTransaction();
CloseConnection();
}
catch (Exception e)
{
string msg = "Exception in 'CreateDatabase(SqliteDataSet dataSet)':\n" + e.Message;
Debug.WriteLine(msg);
CloseConnection();
}
}
/// <summary>
/// Cancels the changes.
/// </summary>
/// <returns></returns>
public int CancelChanges()
{
if (this.Connection != null && this.Connection.State == NetSystem.Data.ConnectionState.Open)
{
try
{
this.Connection.Cancel();
}
catch (Exception e)
{
Debug.WriteLine(e.Message);
}
return 1;
}
return 0;
}
/// <summary>
/// Closes the connection.
/// </summary>
/// <returns></returns>
public int CloseConnection()
{
if (this.Connection == null)
{
Debug.WriteLine("Error in 'OpenConnection()': this.DatabaseConnection == null");
return 0;
}
if (this.Connection.State == NetSystem.Data.ConnectionState.Open)
this.Connection.Close();
return 1;
}
/// <summary>
/// Begins the transaction.
/// </summary>
/// <returns></returns>
private int BeginTransaction()
{
if (this.Connection == null)
{
Debug.WriteLine("Error in 'BeginTransaction()': this.DatabaseConnection == null");
return 0;
}
if (this.Connection.State == NetSystem.Data.ConnectionState.Closed)
OpenConnection();
try
{
this.ActiveTransaction = this.Connection.BeginTransaction();
}
catch (Exception e)
{
Debug.WriteLine("Exception in 'BeginTransaction()': " + e.Message);
return 0;
}
return 1;
}
/// <summary>
/// Commits the transaction.
/// </summary>
/// <returns></returns>
private int CommitTransaction()
{
if (this.ActiveTransaction == null)
{
Debug.WriteLine("Error in 'CommitTransaction()': this.DatabaseConnection == null");
return 0;
}
this.ActiveTransaction.Commit();
return 1;
}
internal void InsertDataSet(SQLiteDataSet dataSet)
{
OpenConnection();
BeginTransaction();
foreach (SQLiteDataTable table in dataSet.Tables)
InsertTable(table);
CommitTransaction();
CloseConnection();
}
/// <summary>
/// Opens the connection.
/// </summary>
/// <returns></returns>
private int OpenConnection()
{
if (this.Connection == null)
{
Debug.WriteLine("Error in 'CommitTransaction()': this.DatabaseConnection == null");
return 0;
}
if (this.Connection.State != ConnectionState.Open)
this.Connection.Open();
return 1;
}
public DataSet GetContentAsDataSet(string fullPath)
{
DataSet dataSet = new DataSet();
ConnectToDatabase(fullPath);
OpenConnection();
// first we get a list of all tables from the db
SQLiteCommand command = new SQLiteCommand("SELECT name FROM sqlite_master WHERE type = 'table';", this.Connection);
SQLiteDataReader dataReader = command.ExecuteReader();
while (dataReader.Read())
{
string tableName = dataReader.GetString(0);
SQLiteDataAdapter dataAdapter = new SQLiteDataAdapter(string.Format("SELECT * From {0}", tableName), this.Connection);
dataAdapter.FillSchema(dataSet, SchemaType.Source, tableName);
dataAdapter.Fill(dataSet, tableName);
}
return dataSet;
}
private string GetColumnAttributes(SQLiteDataTable table)
{
List<string> AttributePair = table.Rows[0].Fields.Select(p => string.Format("'{0}' {1} {2}", p.Parameter.ParameterName, p.Parameter.DbType, p.Parameter.IsNullable ? "NULL" : "NOT NULL")).ToList();
string attributes = string.Join(",", AttributePair);
if (attributes == "") attributes = "EMPTY_TABLE";
else attributes += ", CONSTRAINT id_pk PRIMARY KEY (Id)";
return attributes;
}
private int CreateTable(SQLiteDataTable table)
{
int returnValue = 0;
SQLiteCommand sqlCommand = new SQLiteCommand(this.Connection);
string commandText = string.Format("CREATE TABLE IF NOT EXISTS '{0}' ({1});", table.Name, GetColumnAttributes(table));
try
{
sqlCommand.CommandText = commandText;
sqlCommand.ExecuteNonQuery();
}
catch (Exception e)
{
string msg = string.Format("Exception in executing command: '{0}':\n{1}", commandText, e.Message);
Debug.WriteLine(msg);
returnValue = -1;
}
return returnValue;
}
private void InsertTable(SQLiteDataTable table)
{
if (table != null)
foreach (SQLiteDataRow row in table.Rows)
InsertRow(table.Name, row);
}
private void InsertRow(string tableName, SQLiteDataRow row)
{
List<string> valueNames = row.Fields.Select(p => p.Parameter.ParameterName).ToList();
List<string> valuePlaceholders = row.Fields.Select(p => "@" + p.Parameter.ParameterName).ToList();
List<object> values = row.Fields.Select(p => p.Parameter.Value).ToList();
// we don't insert when no value names are given _OR_ all values are NULL
if (valueNames.Count <= 0 || values.All(item => item == null))
return;
string commandText = string.Format("INSERT INTO {0} ({1}) VALUES ({2})", tableName, string.Join(",", valueNames), string.Join(",", valuePlaceholders));
try
{
SQLiteCommand command = new SQLiteCommand(commandText, this.Connection);
for (int i = 0; i < values.Count; i++)
command.Parameters.AddWithValue(valuePlaceholders[i], values[i]);
int retVal = command.ExecuteNonQuery();
}
catch (SQLiteException sqliteExc)
{
string msg = string.Format("Error on 'INSERT INTO {0}': {1}", tableName, sqliteExc.Message.Split('\n')[1]);
Console.WriteLine(msg);
Debug.WriteLine(msg);
}
catch (Exception e)
{
string msg = string.Format("Exception in executing command: '{0}':\n{1}", commandText, e.Message);
Debug.WriteLine(msg);
}
}
#region DataSet-Interface
public static Dictionary<Type, DbType> TypeToDbType = new Dictionary<Type, DbType>
{
{typeof(byte) , DbType.Byte },
{typeof(sbyte) , DbType.SByte},
{typeof(short) , DbType.Int16},
{typeof(ushort) , DbType.UInt16},
{typeof(int) , DbType.Int32},
{typeof(uint) , DbType.UInt32},
{typeof(long) , DbType.Int64},
{typeof(ulong) , DbType.UInt64},
{typeof(float) , DbType.Single},
{typeof(double) , DbType.Double},
{typeof(decimal) , DbType.Decimal},
{typeof(bool) , DbType.Boolean},
{typeof(string) , DbType.String},
{typeof(char) , DbType.StringFixedLength},
{typeof(Guid) , DbType.Guid},
{typeof(DateTime) , DbType.DateTime},
{typeof(DateTimeOffset) , DbType.DateTimeOffset},
};
public void CreateDatabase(DataSet dataSet)
{
try
{
OpenConnection();
BeginTransaction();
foreach (DataTable dt in dataSet.Tables)
{
CreateTable(dt);
//SQLiteDataAdapter dataAdapter = new SQLiteDataAdapter(string.Format("CREATE TABLE IF NOT EXISTS '{0}' ({1});", dt.TableName, string.Join(",", dt.Columns))), this.Connection);
//dataAdapter.AcceptChangesDuringFill = false;
//dataAdapter.Fill(dt);
}
CommitTransaction();
CloseConnection();
}
catch (Exception e)
{
string msg = "Exception in 'CreateDatabase(DataSet dataSet)':\n" + e.Message;
Debug.WriteLine(msg);
//CloseConnection();
}
}
private string GetColumnAttributes(DataTable table)
{
List<string> AttributePair = new List<string>();
foreach (DataColumn c in table.Columns)
AttributePair.Add(string.Format("'{0}' {1} {2}", c.ColumnName, TypeToDbType[c.DataType], c.AllowDBNull ? "NULL" : "NOT NULL"));
string attributes = string.Join(",", AttributePair);
if (attributes == "") attributes = "EMPTY_TABLE";
else attributes += "CONSTRAINT id_pk PRIMARY KEY (Id)";
return attributes;
}
private int CreateTable(DataTable table)
{
int returnValue = 0;
SQLiteCommand sqlCommand = new SQLiteCommand(this.Connection);
string commandText = string.Format("CREATE TABLE IF NOT EXISTS '{0}' ({1});", table.TableName, GetColumnAttributes(table));
try
{
sqlCommand.CommandText = commandText;
sqlCommand.ExecuteNonQuery();
}
catch (Exception e)
{
string msg = string.Format("Exception in executing command: '{0}':\n{1}", commandText, e.Message);
Debug.WriteLine(msg);
returnValue = -1;
}
return returnValue;
}
#endregion
}
}
<|start_filename|>IfcSharpCore/ifc_out_html.cs<|end_filename|>
// ifc_out_html.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
using System.Collections.Generic;
using System.Collections;
using System.Globalization;
using System.IO;
using Threading=System.Threading;
using System.Reflection;
using NetSystem=System;
namespace ifc{//==============================
public partial class ENTITY{//==========================================================================================
public static string HtmlRefOut(string expr)
{
List<int> SharpList=new List<int>();
if (expr.Contains("#"))
{for (int pos=0;pos<expr.Length;pos++) if (expr[pos]=='#') SharpList.Add(pos);
string NewExpr=expr;
foreach (int SharpPos in SharpList) {int pos=SharpPos;while ( (pos<expr.Length) && (expr[pos]!=',') && (expr[pos]!=')') ) pos++;
string link="-"; if (pos>SharpPos) link=expr.Substring(SharpPos,pos-SharpPos);
string NewLink="<a href=\""+link+"\" class=\"ref\">"+link+"</a>";
NewExpr=NewExpr.Replace(link, NewLink);
}
expr=NewExpr;
}
return expr;
}
public static string HtmlRefOut(FieldInfo field,string IfcId,ENTITY e){string RefClassName="ref";if (NameKeywDict.ContainsKey(e.ShortTypeName())) RefClassName+=" keyw"+NameKeywDict[e.ShortTypeName()];return "<a href=\""+IfcId+"\" class=\""+RefClassName+"\">"+IfcId+"</a>";}
public static string HtmlOut(FieldInfo field,string ClassName, string value){return "<span class=\""+ClassName+"\">"+value+"</span>";}
public static string HtmlNullOut(FieldInfo field,bool IsDerived){return "<span class=\"dollar\">"+((IsDerived)?"*":"$")+"</span>";}
public static string HtmlEnumOut(FieldInfo field,string value){return "<span class=\"enum\" title=\""+field.FieldType.Name+" "+field.Name+"\" >."+value+".</span>";}
public static string HtmlTextOut(FieldInfo field,string value){return "<span class=\"text\">"+value+"</span>";}
public static string HtmlOut(FieldInfo field,object o,bool IsDerived){
string s="";
if (o==null) { s=HtmlNullOut(field,IsDerived);}
else if (o is Enum) {/*if (o.ToString()=="_NULL") s=HtmlNullOut(field,IsDerived); else */ s=HtmlEnumOut(field,o.ToString());}
else if (o is SELECT) {if ( ((SELECT)o).IsNull) s=HtmlNullOut(field,IsDerived);
else { if (((SELECT)o).Id==0) if ( ((SELECT)o).SelectType().IsSubclassOf(typeof(ENTITY)) ) { ((SELECT)o).Id=((ENTITY)((SELECT)o).SelectValue()).LocalId; }
if (((SELECT)o).Id>0) s=HtmlRefOut(field,"#"+((SELECT)o).Id.ToString(),(ENTITY)(((SELECT)o).SelectValue()));
else s="IFC"+((SELECT)o).SelectType().Name.ToUpper()+"("+HtmlOut(field,((SELECT)o).SelectValue().ToString(),IsDerived)+")";
}
}
else if (o is ENTITY) if ( ((ENTITY)o).LocalId==0 ) s=HtmlNullOut(field,IsDerived); else s=HtmlRefOut(field,((ENTITY)o).IfcId(),(ENTITY)o);
else if (o is TypeBase) {TypeBase tb=(TypeBase)o;if (tb.GetBaseType()==typeof(String)) {if (o.ToString()=="" || o.ToString()=="null") s=HtmlNullOut(field,IsDerived);else s=HtmlTextOut(field,o.ToString()); } else {if (o.ToString()=="null") s=HtmlNullOut(field,IsDerived);else s=HtmlOut(field,"float",o.ToString());} }
else if (o is String) {if (o.ToString()=="") s=HtmlNullOut(field,IsDerived);else s=HtmlOut(field,"text",o.ToString());}
else if( typeof(IEnumerable).IsAssignableFrom(o.GetType())) {s=HtmlOut(field,"list",HtmlRefOut(o.ToString()));}
else {if (o.ToString()=="null") s=HtmlNullOut(field,IsDerived); else s=HtmlOut(field,"int",o.ToString());}
return s;
}
public static Dictionary<string, int> NameKeywDict=new Dictionary<string,int>();
public virtual string ToHtml(){
Threading.Thread.CurrentThread.CurrentCulture=CultureInfo.InvariantCulture;
string ElementName=this.GetType().ToString().Replace("IFC4","ifc").Replace("ifc.","");
string EntityClassName="entity";if (NameKeywDict.ContainsKey(ElementName)) EntityClassName+=" keyw"+NameKeywDict[ElementName];
string IdClassName="id"; if (NameKeywDict.ContainsKey(ElementName)) IdClassName+=" keyw"+NameKeywDict[ElementName];
string Args="(";
AttribListType AttribList=TypeDictionary.GetComponents(this.GetType()).AttribList;
int sep=0;foreach (AttribInfo attrib in AttribList) Args+=((++sep>1)?",":"")+attrib.field.FieldType.Name +attrib.field.Name;
Args+=")";
string s="\r\n<div class=\"line"+(ifc.EntityComment.HtmlCnt%4) +"\"><a name=\""+this.LocalId.ToString()+"\"/><span class=\""+IdClassName+"\">"+"<a href=\"#"+this.LocalId.ToString()+"\">#"+ this.LocalId.ToString()+"</a></span><span class=\"equal\">=</span><span class=\"ifc\">ifc</span><span class=\""
+EntityClassName+"\" title=\"ifc"+ElementName
+Args
+"\">"+ElementName+"</span>(";
sep=0;
if (this is CartesianPoint) {CartesianPoint cp=(CartesianPoint)this;string coords="";foreach (LengthMeasure lm in cp.Coordinates ) coords+=((++sep>1)?",":"")+((double)lm).ToString("#0.0000"); s+=HtmlOut(AttribList[0].field,"list",coords);}
else if (this is Direction ) {Direction cp=(Direction) this;string coords="";foreach (Real lm in cp.DirectionRatios) coords+=((++sep>1)?",":"")+((double)lm).ToString("#0.0000"); s+=HtmlOut(AttribList[0].field,"list",coords);}
else foreach (AttribInfo attrib in AttribList) s+=((++sep>1)?",":"")+HtmlOut(attrib.field,attrib.field.GetValue(this),attrib.IsDerived);
s+=")<span class=\"semik\">;</span>";
if (EndOfLineComment!=null) s+="<span class=\"EndOfLineComment\">/* "+EndOfLineComment+" */</span>";
s+="<br/></div>";
return s;
}
}//of ENTITY ==========================================================================================================
public partial class EntityComment:ENTITY{//==========================================================================================
public override string ToHtml(){HtmlCnt++;return "\r\n<span class=\"Commentline\">/* "+CommentLine+" */</span><br/>";}
}//=====================================================================================================================
public partial class Model{//==========================================================================================
private static string FormattedHeaderLine(string line){return "<span class=\"header\">"+line+"</span>"+"<br/>";}
public void ToHtmlFile()
{
StreamWriter sw=new StreamWriter(Header.name+".html");
//Console.WriteLine("Start ToHtmlFile");
sw.WriteLine("<html>");
sw.WriteLine("<head>");
sw.WriteLine("<title>ifc</title>");
//sw.WriteLine("<link rel=\"stylesheet\" type=\"text/css\" href=\"ifc.css\"/>");
sw.WriteLine("<style>");
sw.WriteLine(".global{");
sw.WriteLine(" background-color: #FFFFEE;");
sw.WriteLine(" font-size: 10pt;");
sw.WriteLine(" font-family: Courier New;");
sw.WriteLine(" margin: 1em; padding: 0.5em;");
sw.WriteLine("}");
sw.WriteLine(" .id {color: red;}");
sw.WriteLine(" .ifc {color: gray;}");
sw.WriteLine(" .commentline {color: black; background-color:white; font-weight: bold;white-space: pre;text-decoration: underline;}");
sw.WriteLine(" .EndOfLineComment {color: black; background-color:white; font-weight: bold;white-space: pre;}");
sw.WriteLine(" .ref {color: blue;text-decoration: underline;}");
sw.WriteLine(" .entity {color: navy;font-weight:bold;}");
sw.WriteLine(" .text {color: maroon; font-weight:bold;}");
sw.WriteLine(" .dollar {color: gray; }");
sw.WriteLine(" .float {color: purple; font-weight:bold;}");
sw.WriteLine(" .int {color: teal;}");
sw.WriteLine(" .list {color: black;}");
sw.WriteLine(" .guid {color: orange;}");
sw.WriteLine(" .enum {color: green;font-weight:bold;}");
sw.WriteLine(" .header {color: darkcyan;}");
sw.WriteLine(" .equal {color: darkgray;}");
sw.WriteLine(" .semik {color: olive;}");
sw.WriteLine(" .keyw0 {background-color:#A0FFFF;}");
sw.WriteLine(" .keyw1 {background-color:#FFA0FF;}");
sw.WriteLine(" .keyw2 {background-color:#FFFFA0;}");
sw.WriteLine(" .keyw3 {background-color:#A0FFA0;}");
sw.WriteLine(" .line0 {background-color:#F0FFFF;}");
sw.WriteLine(" .line1 {background-color:#FAFAFA;}");
sw.WriteLine(" .line2 {background-color:#FFFFF8;}");
sw.WriteLine(" .line3 {background-color:#F0FFF0;}");
sw.WriteLine("</style>");
sw.WriteLine("</head>");
sw.WriteLine("<body>");
sw.WriteLine("<div class=\"global\">");
//foreach (string s in Header) sw.WriteLine("<span class=\"header\">"+s+"</span>"+"<br/>");
sw.WriteLine(FormattedHeaderLine("ISO-10303-21;"));
sw.WriteLine(FormattedHeaderLine("HEADER;"));
sw.WriteLine(FormattedHeaderLine("FILE_DESCRIPTION (('"+Header.description+"'), '2;1');"));
sw.WriteLine(FormattedHeaderLine("FILE_NAME ('"+Header.name+"', '"+NetSystem.String.Format("{0:s}",NetSystem.DateTime.Now)+"', ('"+Header.author+"'), ('"+Header.organization+"'), '"+ Header.preprocessor_version+"', '"+Header.originating_system+"', '"+Header.authorization+"');"));
sw.WriteLine(FormattedHeaderLine("FILE_SCHEMA (('"+ifc.Specification.SchemaName+"'));"));
sw.WriteLine(FormattedHeaderLine("ENDSEC;"));
sw.WriteLine(FormattedHeaderLine("DATA;"));
//foreach (KeyValuePair<int,ENTITY> kvp in AssignedEntityDict) sw.Write(kvp.Value.ToHtml()+"<br/>");
foreach (ENTITY e in EntityList) sw.Write(e.ToHtml());
sw.WriteLine(FormattedHeaderLine("ENDSEC;"));
sw.WriteLine(FormattedHeaderLine("END-ISO-10303-21;"));
sw.WriteLine("</div>"); // pre
sw.WriteLine("</body>");
sw.WriteLine("</html>");
sw.Close();
}// of ToHtmlFile
}// of Model ==========================================================================================================
}//ifc ==============================
<|start_filename|>IfcStep/ifc_out_step.cs<|end_filename|>
// ifc_out_step.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
using System.Collections.Generic;
using System.Collections;
using System.Globalization;
using System.Reflection;
using Threading=System.Threading;
using System.IO;
using NetSystem=System;
namespace ifc{//###################################################################################
public partial class ENTITY{//=====================================================================
public static string StepAttributeOut(object o,AttribInfo attrib=null){//-------------------------------------------------
string s;//="";
if (o==null) {if (attrib==null) s="$"; else s=((attrib.IsDerived)?"*":"$");}
else if (o is Enum) {s="."+o.ToString()+".";}
else if (o is SELECT) {if ( ((SELECT)o).IsNull) s="$";
else { if (((SELECT)o).SelectValue() is ENTITY) s=((SELECT)o).SelectValue().ToString();
else s="IFC"+((SELECT)o).SelectType().Name.ToUpper()+"("+((SELECT)o).SelectValue().ToString()+")";
}
}
else if (o is ENTITY) s=((ENTITY)o).IfcId();
else if (o is TypeBase) s=o.ToString();
else if( typeof(IEnumerable).IsAssignableFrom(o.GetType())) s=o.ToString();
else s=o.ToString();
return s;
}//------------------------------------------------------------------------------------------------
public virtual string ToStepLine(){//--------------------------------------------------------------
string s=this.IfcId()+"=IFC"+this.GetType().Name.ToUpper()+"(";
AttribListType AttribList=TypeDictionary.GetComponents(this.GetType()).AttribList;
int sep=0;foreach (AttribInfo attrib in AttribList) s+=((++sep>1)?",":"")+StepAttributeOut(attrib.field.GetValue(this),attrib);
s+=");";
if (EndOfLineComment!=null) s+="/* "+EndOfLineComment+" */";
return s;
}//------------------------------------------------------------------------------------------------
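 // Example of a line produced by ToStepLine (illustrative values, assuming IfcId() renders as "#<n>"):
 //   #12=IFCCARTESIANPOINT((0.,0.,0.));/* optional EndOfLineComment */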
}//================================================================================================
public partial class EntityComment:ENTITY{//=======================================================
public override string ToStepLine() {return "/* "+CommentLine+" */";}
}//================================================================================================
public partial class Model{//======================================================================
public void ToStepFile(string filePath="")//-------------------------------------------------------------------------
{
Threading.Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
foreach (ENTITY e in EntityList) if (e is ifc.Root) if (((ifc.Root)e).GlobalId == null) ((ifc.Root)e).GlobalId = ifc.GloballyUniqueId.NewId();
//EF-2021-03-02: added support for specifying a file path; if the file path is omitted, the header name of the Model is used
if (string.IsNullOrEmpty(filePath)) filePath = Header.name + ".ifc";
StreamWriter sw = new StreamWriter(filePath);
sw.WriteLine("ISO-10303-21;");
sw.WriteLine("HEADER;");
sw.WriteLine("FILE_DESCRIPTION (('" + Header.description + "'), '2;1');");
sw.WriteLine("FILE_NAME ('" + Header.name + "', '" + NetSystem.String.Format("{0:s}", NetSystem.DateTime.Now) + "', ('" + Header.author + "'), ('" + Header.organization + "'), '" + Header.preprocessor_version + "', '" + Header.originating_system + "', '" + Header.authorization + "');");
sw.WriteLine("FILE_SCHEMA (('" + Specification.SchemaName + "'));");
sw.WriteLine("ENDSEC;");
sw.WriteLine("DATA;");
foreach (ENTITY e in EntityList) sw.WriteLine(e.ToStepLine());
sw.WriteLine("ENDSEC;");
sw.WriteLine("END-ISO-10303-21;");
sw.Close();
}// of ToStepFile ---------------------------------------------------------------------------------
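/* Usage sketch (illustrative; as implemented above, an omitted filePath falls back to
   Header.name + ".ifc"):
     Model model = new Model("Demo");
     // ... populate model.EntityList ...
     model.ToStepFile();                  // writes "Demo.ifc"
     model.ToStepFile(@"out\Demo.ifc");   // writes to an explicit path
*/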
}// of Model ======================================================================================
}// ifc ###########################################################################################
<|start_filename|>IfcSharpCore/ifc_base_select.cs<|end_filename|>
// ifc_base_select.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
namespace ifc{//==============================
public class SELECT:ifcSqlType{//--------------------------------
public Type SelectType(){return _Type;}
public object SelectValue(){return _SelectValue;}
public void SetValueAndType(object _SelectValue,Type _Type){this._SelectValue=_SelectValue;this._Type=_Type;}
protected object _SelectValue;
protected Type _Type;
private int _Id=0;
public int Id{get{return _Id;}set{ IsNull=(value==0);_Id = value;}}
}//---------------------------------------------------
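// Usage sketch (illustrative, using only the members defined above): generated SELECT
// subclasses carry a concrete value plus its type,
//   sel.SetValueAndType(someTypedValue, someTypedValue.GetType());
//   object v = sel.SelectValue(); Type t = sel.SelectType();
// and an Id of 0 marks the SELECT as null via the Id setter.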
}// ifc=======================================
<|start_filename|>IfcSharpCore/ifc_header.cs<|end_filename|>
// ifc_header.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
namespace ifc{//==============================
public class HeaderData{//==============================================================================================
public string description=ifc.Specification.SchemaName;// e.g. ViewDefinition[CoordinationView], only STEP, not ifcXML
public string name="";// FileName
public string time_stamp="";
public string author="<NAME>, <NAME>";
public string organization=@"https://github.com/IfcSharp";
public string preprocessor_version=@"https://github.com/IfcSharp";
public string originating_system="";
public string authorization="";
public string documentation="";
public void Init(string name,string description, string author,string preprocessor_version)
{this.name=name;this.description=description;this.author=author;this.preprocessor_version=preprocessor_version;}
public void Reset(){description=ifc.Specification.SchemaName;name="IfcSharp";time_stamp="";author="<NAME>, <NAME>";organization=@"https://github.com/IfcSharp";preprocessor_version="";originating_system="";authorization=@"https://github.com/IfcSharp";documentation=""; }
}//====================================================================================================================
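/* Usage sketch (illustrative):
     HeaderData header = new HeaderData();
     header.Init(name: "Demo", description: ifc.Specification.SchemaName,
                 author: "A. Author", preprocessor_version: "IfcSharp");
*/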
}// ifc=======================================
<|start_filename|>IfcSharpCore/ifc_base_units.cs<|end_filename|>
// ifc_base_unit.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
public class units{
public static double mm2m=1.0/1000.0;
public static double m2mm=1000.0;
}
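// Worked example: 2500.0 * units.mm2m == 2.5 (millimetres to metres),
// and 2.5 * units.m2mm == 2500.0 (metres back to millimetres).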
namespace ifc{//==============================
public class ifcUnitAtribute:Attribute{public ifcUnitAtribute(UnitEnum eUnitEnum=UnitEnum.USERDEFINED,DerivedUnitEnum eDerivedUnitEnum=DerivedUnitEnum.USERDEFINED){this.eUnitEnum=eUnitEnum;this.eDerivedUnitEnum=eDerivedUnitEnum;}
public UnitEnum eUnitEnum=UnitEnum.USERDEFINED;
public DerivedUnitEnum eDerivedUnitEnum=DerivedUnitEnum.USERDEFINED;
}
public interface ifcUnitInterface{
bool IsUnitEnum();
bool IsDerivedUnitEnum();
UnitEnum eUnitEnum();
DerivedUnitEnum eDerivedUnitEnum();
}
public partial class DimensionalExponents:ENTITY{
public DimensionalExponents(SIUnitName n, int LengthExponent,int MassExponent,int TimeExponent,int ElectricCurrentExponent,int ThermodynamicTemperatureExponent,int AmountOfSubstanceExponent,int LuminousIntensityExponent):base(){/*Id=NextGlobalId++;EntityList.Add(this);*/this.LengthExponent=LengthExponent;this.MassExponent=MassExponent;this.TimeExponent=TimeExponent;this.ElectricCurrentExponent=ElectricCurrentExponent;this.ThermodynamicTemperatureExponent=ThermodynamicTemperatureExponent;this.AmountOfSubstanceExponent=AmountOfSubstanceExponent;this.LuminousIntensityExponent=LuminousIntensityExponent;}
}
public partial class SIUnit:NamedUnit{
/* DERIVED */ public SIUnit(UnitEnum UnitType,SIUnitName Name,SIPrefix? Prefix=null,string EndOfLineComment=null):base(){AddNext();this.Dimensions=DimensionsForSiUnit(Name);this.UnitType=UnitType;this.Prefix=Prefix;this.Name=Name;this.EndOfLineComment=EndOfLineComment;}
DimensionalExponents DimensionsForSiUnit(SIUnitName n)
{
switch (n) {case SIUnitName.METRE :return new DimensionalExponents(n,1, 0, 0, 0, 0, 0, 0);
case SIUnitName.SQUARE_METRE :return new DimensionalExponents(n,2, 0, 0, 0, 0, 0, 0);
case SIUnitName.CUBIC_METRE :return new DimensionalExponents(n,3, 0, 0, 0, 0, 0, 0);
case SIUnitName.GRAM :return new DimensionalExponents(n,0, 1, 0, 0, 0, 0, 0);
case SIUnitName.SECOND :return new DimensionalExponents(n,0, 0, 1, 0, 0, 0, 0);
case SIUnitName.AMPERE :return new DimensionalExponents(n,0, 0, 0, 1, 0, 0, 0);
case SIUnitName.KELVIN :return new DimensionalExponents(n,0, 0, 0, 0, 1, 0, 0);
case SIUnitName.MOLE :return new DimensionalExponents(n,0, 0, 0, 0, 0, 1, 0);
case SIUnitName.CANDELA :return new DimensionalExponents(n,0, 0, 0, 0, 0, 0, 1);
case SIUnitName.RADIAN :return new DimensionalExponents(n,0, 0, 0, 0, 0, 0, 0);
case SIUnitName.STERADIAN :return new DimensionalExponents(n,0, 0, 0, 0, 0, 0, 0);
case SIUnitName.HERTZ :return new DimensionalExponents(n,0, 0, -1, 0, 0, 0, 0);
case SIUnitName.NEWTON :return new DimensionalExponents(n,1, 1, -2, 0, 0, 0, 0);
case SIUnitName.PASCAL :return new DimensionalExponents(n,-1, 1, -2, 0, 0, 0, 0);
case SIUnitName.JOULE :return new DimensionalExponents(n,2, 1, -2, 0, 0, 0, 0);
case SIUnitName.WATT :return new DimensionalExponents(n,2, 1, -3, 0, 0, 0, 0);
case SIUnitName.COULOMB :return new DimensionalExponents(n,0, 0, 1, 1, 0, 0, 0);
case SIUnitName.VOLT :return new DimensionalExponents(n,2, 1, -3, -1, 0, 0, 0);
case SIUnitName.FARAD :return new DimensionalExponents(n,-2, -1, 4, 2, 0, 0, 0);
case SIUnitName.OHM :return new DimensionalExponents(n,2, 1, -3, -2, 0, 0, 0);
case SIUnitName.SIEMENS :return new DimensionalExponents(n,-2, -1, 3, 2, 0, 0, 0);
case SIUnitName.WEBER :return new DimensionalExponents(n,2, 1, -2, -1, 0, 0, 0);
case SIUnitName.TESLA :return new DimensionalExponents(n,0, 1, -2, -1, 0, 0, 0);
case SIUnitName.HENRY :return new DimensionalExponents(n,2, 1, -2, -2, 0, 0, 0);
case SIUnitName.DEGREE_CELSIUS :return new DimensionalExponents(n,0, 0, 0, 0, 1, 0, 0);
case SIUnitName.LUMEN :return new DimensionalExponents(n,0, 0, 0, 0, 0, 0, 1);
case SIUnitName.LUX :return new DimensionalExponents(n,-2, 0, 0, 0, 0, 0, 1);
case SIUnitName.BECQUEREL :return new DimensionalExponents(n,0, 0, -1, 0, 0, 0, 0);
case SIUnitName.GRAY :return new DimensionalExponents(n,2, 0, -2, 0, 0, 0, 0);
case SIUnitName.SIEVERT :return new DimensionalExponents(n,2, 0, -2, 0, 0, 0, 0);
default :return new DimensionalExponents(n,0, 0, 0, 0, 0, 0, 0);
}
}
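 // Worked example: NEWTON is kg*m/s^2, i.e. Length^1 * Mass^1 * Time^-2, which yields the
 // exponent tuple (1, 1, -2, 0, 0, 0, 0) above; PASCAL (N/m^2) accordingly gets (-1, 1, -2, ...).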
}
}// ifc==============================
<|start_filename|>IfcSharpCore/ifc_base.cs<|end_filename|>
// ifc_base.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
/*
SELECT-Types contain mixed elements, i.e. Types AND Entities (ENTITY for TrimmingSelect.CartesianPoint, TYPE for TrimmingSelect.ParameterValue).
SELECT-Types can't be base classes, otherwise multiple inheritance (e.g. TYPE and ENTITY) would arise, which is not supported by C#.
SELECT-Types must be checked for reading against all contained types AND their derivations (permission check), e.g. Unit(DerivedUnit,MonetaryUnit,NamedUnit), but also SIUnit, because SIUnit is a subtype of NamedUnit.
SELECT-Types can have multiple possible types (not Entities) which share the same base type, so the type name must be explicit to be unambiguous,
e.g.: Value (DerivedMeasureValue,MeasureValue,SimpleValue), SimpleValue(Label<string>, Text<string>).
So a SELECT must contain the instanced type name, e.g. IFCBOOLEAN(.T.), IFCBOOLEAN(.F.)
Types don't need an explicit type name, e.g. GloballyUniqueId.
Entity elements which share a name with a type here have a preceding underscore.
Alternatively, an @ prefix would be possible.
Since element names only very rarely collide with type names, the preceding @ is omitted for better readability.
IfcSharp does not require nullable base types, as these only affect classes (which are nullable per se). Only for enum data types is nullable sometimes required.
*/
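// Illustrative consequence of the rules above: a SELECT wrapping a TYPE is serialized with its
// explicit type name, e.g. IFCBOOLEAN(.T.), while a SELECT wrapping an ENTITY is serialized as
// a plain instance reference, e.g. #25.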
using NetSystem=System;
namespace ifc{//==============================
enum TypeGroup {_NULL=-1,BASETYPE=0,LISTTYPE1D=1,LISTTYPE2D=2,TYPE=3,ENUM=4,ENTITY=5,SELECT=6,LISTTYPE1DOF2D=7}
public class ifcSqlAttribute:NetSystem.Attribute{public ifcSqlAttribute(int TypeGroupId, int TypeId, int TableId=0){this.SqlTypeId=TypeId;this.SqlTypeGroupId=TypeGroupId;this.SqlTableId=TableId;}
public int SqlTypeId=0;
public int SqlTypeGroupId=(int)TypeGroup._NULL;
public int SqlTableId=0;
}
public interface ifcSqlTypeInterface{
int SqlTypeId();
int SqlTypeGroupId();
int SqlTableId();
bool IsNull{get;set;}
}
public interface ifcParseInterface{
object Parse(string value);
}
public class ifcType{ // place-holder
}
public class ifcSqlType:ifcType,ifcSqlTypeInterface{
public int SqlTypeId(){return ((ifcSqlAttribute)this.GetType().GetCustomAttributes(true)[0]).SqlTypeId;}
public int SqlTypeGroupId(){return ((ifcSqlAttribute)this.GetType().GetCustomAttributes(true)[0]).SqlTypeGroupId;}
public int SqlTableId(){return ((ifcSqlAttribute)this.GetType().GetCustomAttributes(true)[0]).SqlTableId;}
public static int SqlTypeId(NetSystem.Type t){ return ((ifcSqlAttribute)t.GetCustomAttributes(true)[0]).SqlTypeId;}
public static int SqlTypeGroupId(NetSystem.Type t){return ((ifcSqlAttribute)t.GetCustomAttributes(true)[0]).SqlTypeGroupId;}
public static int SqlTableId(NetSystem.Type t){return ((ifcSqlAttribute)t.GetCustomAttributes(true)[0]).SqlTableId;}
private bool _IsNull=true;
public bool IsNull{get{return _IsNull;}set{_IsNull = value;}}
}
class Exception:NetSystem.Exception {public Exception (NetSystem.String reason) : base ("ifcSharp:"+reason){}}
public partial class GloballyUniqueId :TYPE<string> {public static string CreateNewId(){ return IfcGuid.ToIfcGuid(NetSystem.Guid.NewGuid()); }
public static GloballyUniqueId NewId(){ return new GloballyUniqueId(CreateNewId()); }
}
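// Usage sketch (illustrative): Model.ToStepFile assigns missing ids the same way,
//   someRoot.GlobalId = GloballyUniqueId.NewId();   // an IFC-compressed GUID string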
}//ifc==============================
<|start_filename|>IfcSql/ifc_in_sql.cs<|end_filename|>
// ifc_in_sql.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
using System.Threading;
using System.Collections.Generic;
using System.Collections;
using System.Globalization;
using System.IO;
using Threading=System.Threading;
using System.Reflection;
using db;
namespace ifc{//==============================
public partial class ENTITY{//==========================================================================================
}// of ENTITY =========================================================================================================
public partial class Model{//==========================================================================================
public Dictionary<long,int> LocalIdFromGlobalIdDict=new Dictionary<long,int>();
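// EvalIfcRow rebuilds one entity from its ifcSQL rows: the CLR type is resolved by name via the
// TypeDictionary, comments are restored from the pseudo-attribute at ordinal position -1, and each
// remaining attribute row (vector, string, enum, integer, float, entity reference or list) is
// converted back into the corresponding field value.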
public void EvalIfcRow(ifcSQL.ifcInstance.Entity_Row e)
{try{
Type t=Type.GetType("ifc."+ifc.ENTITY.TypeDictionary.TypeIdNameDict[e.EntityTypeId],true,true);// 2. true: ignoreCase
ENTITY CurrentEntity=(ENTITY)Activator.CreateInstance(t);
CurrentEntity.LocalId=LocalIdFromGlobalIdDict[e.GlobalEntityInstanceId];// e.LocalId;
CurrentEntity.ifcSqlGlobalId=e.GlobalEntityInstanceId;
// comment handling:
if (CurrentEntity is EntityComment) ((EntityComment)CurrentEntity).CommentLine=((ifcSQL.ifcInstance.EntityAttributeOfString_Row)e.AttributeValueDict[-1]).Value;
else if (e.AttributeValueDict.ContainsKey(-1)) CurrentEntity.EndOfLineComment=((ifcSQL.ifcInstance.EntityAttributeOfString_Row)e.AttributeValueDict[-1]).Value;
object[] TypeCtorArgs=new object[1];
int OrdinalPosition=0;
ENTITY.AttribListType AttribList=ENTITY.TypeDictionary.GetComponents(CurrentEntity.GetType()).AttribList;
foreach (ENTITY.AttribInfo attrib in AttribList)
{++OrdinalPosition;
if (e.AttributeValueDict.ContainsKey(OrdinalPosition))//----------------------------------------------------------------------------------------------------------
{RowBase rb=e.AttributeValueDict[OrdinalPosition];
if (rb is ifcSQL.ifcInstance.EntityAttributeOfVector_Row) {ifcSQL.ifcInstance.EntityAttributeOfVector_Row a=(ifcSQL.ifcInstance.EntityAttributeOfVector_Row)rb;
if (a.TypeId==25) {if (a.Z!=null) ((ifc.CartesianPoint)CurrentEntity).Coordinates=new List1to3_LengthMeasure((LengthMeasure)a.X,(LengthMeasure)a.Y,(LengthMeasure)(double)a.Z);
else ((ifc.CartesianPoint)CurrentEntity).Coordinates=new List1to3_LengthMeasure((LengthMeasure)a.X,(LengthMeasure)a.Y);
}
#if IFC2X3
if (a.TypeId==42) {if (a.Z!=null) ((ifc.Direction)CurrentEntity).DirectionRatios=new List2to3_double(a.X,a.Y,(double)a.Z);
else ((ifc.Direction)CurrentEntity).DirectionRatios=new List2to3_double(a.X,a.Y);
}
#else
if (a.TypeId==42) {if (a.Z!=null) ((ifc.Direction)CurrentEntity).DirectionRatios=new List2to3_Real((Real)a.X,(Real)a.Y,(Real)(double)a.Z);
else ((ifc.Direction)CurrentEntity).DirectionRatios=new List2to3_Real((Real)a.X,(Real)a.Y);
}
#endif
}
else if (rb is ifcSQL.ifcInstance.EntityAttributeOfString_Row) {ifcSQL.ifcInstance.EntityAttributeOfString_Row a=(ifcSQL.ifcInstance.EntityAttributeOfString_Row)rb;
TypeCtorArgs[0]=ifc.IfcString.Decode(a.Value);
attrib.field.SetValue(CurrentEntity,Activator.CreateInstance(attrib.field.FieldType,TypeCtorArgs));
}
else if (rb is ifcSQL.ifcInstance.EntityAttributeOfEnum_Row) {ifcSQL.ifcInstance.EntityAttributeOfEnum_Row a=(ifcSQL.ifcInstance.EntityAttributeOfEnum_Row)rb;
Type UnderlyingType = Nullable.GetUnderlyingType( attrib.field.FieldType);
if (UnderlyingType!=null && UnderlyingType.IsEnum) attrib.field.SetValue(CurrentEntity,Enum.ToObject(UnderlyingType, a.Value));
else attrib.field.SetValue(CurrentEntity,a.Value);
}
else if (rb is ifcSQL.ifcInstance.EntityAttributeOfInteger_Row) {ifcSQL.ifcInstance.EntityAttributeOfInteger_Row a=(ifcSQL.ifcInstance.EntityAttributeOfInteger_Row)rb;
object o=Activator.CreateInstance(ENTITY.TypeDictionary.TypeIdTypeDict[a.TypeId],a.Value);
if (attrib.field.FieldType.IsSubclassOf(typeof(SELECT))) {TypeCtorArgs[0]=o;o=Activator.CreateInstance(attrib.field.FieldType,TypeCtorArgs);}
attrib.field.SetValue(CurrentEntity,o);
}
else if (rb is ifcSQL.ifcInstance.EntityAttributeOfFloat_Row) {ifcSQL.ifcInstance.EntityAttributeOfFloat_Row a=(ifcSQL.ifcInstance.EntityAttributeOfFloat_Row)rb;
object o=Activator.CreateInstance(ENTITY.TypeDictionary.TypeIdTypeDict[a.TypeId],a.Value);
if (attrib.field.FieldType.IsSubclassOf(typeof(SELECT))) {TypeCtorArgs[0]=o;o=Activator.CreateInstance(attrib.field.FieldType,TypeCtorArgs);}
attrib.field.SetValue(CurrentEntity,o);
}
else if (rb is ifcSQL.ifcInstance.EntityAttributeOfEntityRef_Row) {ifcSQL.ifcInstance.EntityAttributeOfEntityRef_Row a=(ifcSQL.ifcInstance.EntityAttributeOfEntityRef_Row)rb;
if (a.Value>0)
{Type AttributeInstanceType=ifc.ENTITY.TypeDictionary.TypeIdTypeDict[a.TypeId];
object o=Activator.CreateInstance(AttributeInstanceType);((ENTITY)o).LocalId=LocalIdFromGlobalIdDict[a.Value];
if (attrib.field.FieldType.IsSubclassOf(typeof(SELECT))) {TypeCtorArgs[0]=o;o=Activator.CreateInstance(attrib.field.FieldType,TypeCtorArgs);}
attrib.field.SetValue(CurrentEntity,o);
}
}
else if (rb is ifcSQL.ifcInstance.EntityAttributeOfList_Row) {ifcSQL.ifcInstance.EntityAttributeOfList_Row a=(ifcSQL.ifcInstance.EntityAttributeOfList_Row)rb;
Type GenericType=null;
if (attrib.field.FieldType.BaseType.GetGenericArguments().Length>0) GenericType=attrib.field.FieldType.BaseType.GetGenericArguments()[0]; //LengthMeasure or CartesianPoint
else GenericType=attrib.field.FieldType.BaseType.BaseType.GetGenericArguments()[0]; //CompoundPlaneAngleMeasure
Type AttributeInstanceType=ifc.ENTITY.TypeDictionary.TypeIdTypeDict[a.TypeId];
int ListDim1Count=a.AttributeValueDict.Count;
object[] FieldCtorArgs=new object[ListDim1Count];
if (ListDim1Count>0)
if (a.AttributeValueDict[0] is ifcSQL.ifcInstance.EntityAttributeListElementOfEntityRef_Row)
for (int ListDim1Position=0;ListDim1Position<ListDim1Count;ListDim1Position++)
{int Id=LocalIdFromGlobalIdDict[((ifcSQL.ifcInstance.EntityAttributeListElementOfEntityRef_Row)a.AttributeValueDict[ListDim1Position]).Value];
object[] GenericCtorArgs=new object[1];
FieldCtorArgs[ListDim1Position]=Activator.CreateInstance(GenericType); // Console.WriteLine("GenericType= "+GenericType.ToString());
if (GenericType.IsSubclassOf(typeof(SELECT))) ((SELECT)FieldCtorArgs[ListDim1Position]).Id=Id;
else if (GenericType.IsSubclassOf(typeof(ENTITY))) ((ENTITY)FieldCtorArgs[ListDim1Position]).LocalId=Id;
else Console.WriteLine("unkown type");
}
attrib.field.SetValue(CurrentEntity,Activator.CreateInstance(attrib.field.FieldType,FieldCtorArgs));
}
}//----------------------------------------------------------------------------------------------------------
}//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
EntityList.Add(CurrentEntity);
}catch(Exception ex){Console.WriteLine ("ERROR on EvalIfcRow:"+ex.Message);}//Console.ReadLine();}
}
public static Model FromSql(string ServerName,string DatabaseName="ifcSQL_Instance",int ProjectId=0)
{
ifcSQL._ifcSQL_for_ifcSQL_instance ifcSQLin=new ifcSQL._ifcSQL_for_ifcSQL_instance (ServerName: ServerName,DatabaseName:DatabaseName);
ifcSQLin.conn.Open();
if (ProjectId>0) ifcSQLin.ExecuteNonQuery("app.SelectProject "+ProjectId);
ifcSQLin.conn.Close();
ifcSQLin.LoadAllTables();
Dictionary<long,ifcSQL.ifcInstance.Entity_Row> Entity_RowDict=new Dictionary<long, ifcSQL.ifcInstance.Entity_Row>();
foreach (ifcSQL.ifcInstance.Entity_Row e in ifcSQLin.cp.Entity)
{e.AttributeValueDict=new Dictionary<int,RowBase>();
Entity_RowDict.Add(e.GlobalEntityInstanceId,e);
}
ifc.ENTITY.TypeDictionary.FillEntityTypeComponentsDict(); // fill Type Dict
foreach (ifcSQL.ifcInstance.EntityAttributeOfString_Row a in ifcSQLin.cp.EntityAttributeOfString) Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
foreach (ifcSQL.ifcInstance.EntityAttributeOfVector_Row a in ifcSQLin.cp.EntityAttributeOfVector) Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
foreach (ifcSQL.ifcInstance.EntityAttributeOfBinary_Row a in ifcSQLin.cp.EntityAttributeOfBinary) Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
foreach (ifcSQL.ifcInstance.EntityAttributeOfBoolean_Row a in ifcSQLin.cp.EntityAttributeOfBoolean) Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
foreach (ifcSQL.ifcInstance.EntityAttributeOfEntityRef_Row a in ifcSQLin.cp.EntityAttributeOfEntityRef) Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
foreach (ifcSQL.ifcInstance.EntityAttributeOfEnum_Row a in ifcSQLin.cp.EntityAttributeOfEnum) Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
foreach (ifcSQL.ifcInstance.EntityAttributeOfFloat_Row a in ifcSQLin.cp.EntityAttributeOfFloat) Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
foreach (ifcSQL.ifcInstance.EntityAttributeOfInteger_Row a in ifcSQLin.cp.EntityAttributeOfInteger) Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
foreach (ifcSQL.ifcInstance.EntityAttributeOfList_Row a in ifcSQLin.cp.EntityAttributeOfList)
{Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict.Add(a.OrdinalPosition,a);
a.AttributeValueDict=new Dictionary<int,RowBase>();
}
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfEntityRef_Row a in ifcSQLin.cp.EntityAttributeListElementOfEntityRef) ((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]).AttributeValueDict.Add(a.ListDim1Position,a);
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfBinary_Row a in ifcSQLin.cp.EntityAttributeListElementOfBinary) ((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]).AttributeValueDict.Add(a.ListDim1Position,a);
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfFloat_Row a in ifcSQLin.cp.EntityAttributeListElementOfFloat) ((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]).AttributeValueDict.Add(a.ListDim1Position,a);
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfInteger_Row a in ifcSQLin.cp.EntityAttributeListElementOfInteger) ((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]).AttributeValueDict.Add(a.ListDim1Position,a);
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfString_Row a in ifcSQLin.cp.EntityAttributeListElementOfString) ((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]).AttributeValueDict.Add(a.ListDim1Position,a);
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfList_Row a in ifcSQLin.cp.EntityAttributeListElementOfList)
{ifcSQL.ifcInstance.EntityAttributeOfList_Row lr=((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]);
lr.AttributeValueDict.Add(a.ListDim1Position,a);
a.AttributeValueDict=new Dictionary<int,RowBase>();
}
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfListElementOfEntityRef_Row a in ifcSQLin.cp.EntityAttributeListElementOfListElementOfEntityRef) ((ifcSQL.ifcInstance.EntityAttributeListElementOfList_Row) ((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]).AttributeValueDict[a.ListDim1Position]).AttributeValueDict.Add(a.ListDim2Position,a);
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfListElementOfFloat_Row a in ifcSQLin.cp.EntityAttributeListElementOfListElementOfFloat ) ((ifcSQL.ifcInstance.EntityAttributeListElementOfList_Row) ((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]).AttributeValueDict[a.ListDim1Position]).AttributeValueDict.Add(a.ListDim2Position,a);
foreach (ifcSQL.ifcInstance.EntityAttributeListElementOfListElementOfInteger_Row a in ifcSQLin.cp.EntityAttributeListElementOfListElementOfInteger ) ((ifcSQL.ifcInstance.EntityAttributeListElementOfList_Row) ((ifcSQL.ifcInstance.EntityAttributeOfList_Row)Entity_RowDict[a.GlobalEntityInstanceId].AttributeValueDict[a.OrdinalPosition]).AttributeValueDict[a.ListDim1Position]).AttributeValueDict.Add(a.ListDim2Position,a);
ifcSQL.ifcProject.Project_Row CurrentProJect=(ifcSQL.ifcProject.Project_Row)ifcSQLin.cp.Project[0];
Model NewModel=new Model();
foreach (ifcSQL.ifcProject.EntityInstanceIdAssignment_Row eia in ifcSQLin.cp.EntityInstanceIdAssignment) NewModel.LocalIdFromGlobalIdDict[eia.GlobalEntityInstanceId]=(int)eia.ProjectEntityInstanceId; // create and fill LocalGlobal Dict
NewModel.Header.Init(name:CurrentProJect.ProjectName,description:CurrentProJect.ProjectDescription,author:"<NAME>, <NAME>",preprocessor_version:"IfcSharp");
foreach (ifcSQL.ifcInstance.Entity_Row e in ifcSQLin.cp.Entity) NewModel.EvalIfcRow(e);
NewModel.AssignEntities();
return NewModel;
}// of FromSql
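/* Usage sketch (illustrative; assumes a reachable SQL Server hosting the ifcSQL instance database
   with at least one project):
     Model model = Model.FromSql(ServerName: @"localhost\SQLEXPRESS",
                                 DatabaseName: "ifcSQL_Instance", ProjectId: 1);
     model.ToStepFile("FromSql.ifc");
*/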
}// of Model ==========================================================================================================
}//ifc=================================================================================================================
//#################################################################################################################################################################
//#################################################################################################################################################################
namespace ifcSQL{//########################################################################
namespace ifcInstance{//=====================================================================
public partial class Entity_Row : RowBase{public Dictionary<int,RowBase> AttributeValueDict=null;}
public partial class EntityAttributeOfList_Row : RowBase{public Dictionary<int,RowBase> AttributeValueDict=null;}
public partial class EntityAttributeListElementOfList_Row : RowBase{public Dictionary<int,RowBase> AttributeValueDict=null;}
}// namespace ifcInstance -------------------------------------------------------------------
}// namespace ifcSQL ########################################################################
<|start_filename|>IfcSharpCore/ifc_model.cs<|end_filename|>
// ifc_model.cs, Copyright (c) 2020, <NAME>, <NAME>, MIT License (see https://github.com/IfcSharp/IfcSharpLibrary/tree/master/Licence)
using System;
using System.Collections.Generic;
using System.Collections;
using System.Reflection;
namespace ifc{//==============================
public partial class Repository{//==========================================================================================
public static ifc.Model CurrentModel=new ifc.Model();
}//========================================================================================================
public partial class Model{//==========================================================================================
public Model(){}
public Model(string name){this.Header.name=name;}
public Model(string name,string description,string author,string organization,string originating_system,string documentation)
{this.Header.name = name;
this.Header.description = description;
this.Header.author = author;
this.Header.organization = organization;
this.Header.originating_system = originating_system;
this.Header.documentation = documentation;
}
public int NextGlobalId=1;
public int NextGlobalCommentId=0;
public ifc.HeaderData Header=new ifc.HeaderData();
public List<ENTITY> EntityList=new List<ENTITY>();
public void ClearEntityList(){EntityList.Clear();Header.Reset();}
public Dictionary<int,ENTITY> EntityDict=new Dictionary<int,ENTITY>();
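// AssignEntities resolves the stub references created while reading a model: entities are first
// indexed by LocalId in EntityDict, then every ENTITY-, SELECT- and list-typed attribute that
// still holds a placeholder instance is replaced by the real instance from the dictionary.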
public void AssignEntities()
{
EntityDict.Clear();
foreach (ENTITY e in EntityList) /* if (e.LocalId>0) */ if (!EntityDict.ContainsKey(e.LocalId)) {EntityDict.Add(e.LocalId,e);} else Console.WriteLine("#"+e.LocalId+" already exists! (duplicate entry)");
foreach (ENTITY e in EntityList) /* if (e.LocalId>0) */
{//####################################################################################################
Dictionary<int,FieldInfo> VarDict=new Dictionary<int,FieldInfo>();
int VarCount=0; foreach (FieldInfo field in e.GetType().GetFields(BindingFlags.Public|BindingFlags.Instance|BindingFlags.FlattenHierarchy)) foreach (Attribute attr in field.GetCustomAttributes(true)) if (attr is ifcAttribute) {VarDict.Add(((ifcAttribute)attr).OrdinalPosition,field);VarCount++;}
for (int i=1;i<=VarCount;i++)
{//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
FieldInfo field=VarDict[i];
if ( field.FieldType.IsSubclassOf(typeof(ENTITY)))
{ENTITY E=(ENTITY)field.GetValue(e);
if (E!=null) {if (E.LocalId>0) if (EntityDict.ContainsKey(E.LocalId)) field.SetValue(e,EntityDict[E.LocalId]); /* E=EntityDict[E.Id];*/
else Console.WriteLine("E.Id="+E.LocalId+" nicht gefunden");
}
}
else if (field.FieldType.IsSubclassOf(typeof(SELECT)))
{
SELECT S=(SELECT)field.GetValue(e);
if (S!=null)
{//...........................................
if (S.Id>0 && EntityDict.ContainsKey(S.Id)) S.SetValueAndType(EntityDict[S.Id],EntityDict[S.Id].GetType());
else if (!S.IsNull) {ENTITY E=null; if (S!=null) if ( S.SelectType().IsSubclassOf(typeof(ENTITY)) ) E=(ENTITY)S.SelectValue();
if (E!=null) if (E.LocalId>0 && EntityDict.ContainsKey(E.LocalId)) S.SetValueAndType(EntityDict[E.LocalId],EntityDict[E.LocalId].GetType());
}
}//...........................................
}
else if (typeof(IEnumerable).IsAssignableFrom(field.FieldType)) if (field.GetValue(e)!=null)
{//==================================================================
//Console.WriteLine("start list "+i+":"+field.FieldType.Name);
Dictionary<int,object> VarDict1=new Dictionary<int,object>();
int VarCount1=0;foreach (object item in (IEnumerable)field.GetValue(e)) if (item!=null) VarDict1.Add(VarCount1++,item);
object[] FieldCtorArgs=new object[VarCount1];
Type GenericType=null;
if (field.FieldType.BaseType.GetGenericArguments().Length>0) GenericType=field.FieldType.BaseType.GetGenericArguments()[0]; //LengthMeasure or CartesianPoint
else GenericType=field.FieldType.BaseType.BaseType.GetGenericArguments()[0]; //CompoundPlaneAngleMeasure
if ((GenericType!=null) && ( (GenericType.IsSubclassOf(typeof(ENTITY))) || GenericType.IsSubclassOf(typeof(SELECT)) ) )
{//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
for (int i1=0;i1<VarCount1;i1++)
{//------------------------------------------------------
object item=VarDict1[i1]; //Console.Write(field.Name+", "+i+" "+i1);
if (item is SELECT) {//Console.WriteLine("SELECT item "+((SELECT)item).Id +" "+((SELECT)item).SelectType().Name);
if (((SELECT)item).Id==0) if ( ((SELECT)item).SelectType().IsSubclassOf(typeof(ENTITY)) ) { ((SELECT)item).Id=((ENTITY)((SELECT)item).SelectValue()).LocalId; }
if (((SELECT)item).Id>0) {//SELECT s=new SELECT(); /*((SELECT)item)*/
SELECT s=(SELECT)item;
s.SetValueAndType(EntityDict[((SELECT)item).Id],EntityDict[((SELECT)item).Id].GetType());
FieldCtorArgs[i1]=s;// Console.WriteLine(GenericType.Name+": ");
}
}
else if (item is ENTITY) {//===================
if (((ENTITY)item).LocalId>0)
{ENTITY E=(ENTITY)item; // Console.WriteLine("((ENTITY)item).Id="+((ENTITY)item).Id );
if (E!=null) if (E.LocalId>0) {//........................
                           if (EntityDict.ContainsKey(E.LocalId)) E=EntityDict[E.LocalId]; else Console.WriteLine("E.Id="+E.LocalId+" not found");}
FieldCtorArgs[i1]=E;
}//........................
}//===================
}//---------------------------------------------------
field.SetValue(e,Activator.CreateInstance(field.FieldType,FieldCtorArgs)); // ERROR !!
}//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Console.WriteLine("end list");
}//==============================================================
}//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++// of foreach field
}//#################################################################################################### //of foreach Entity
}//of void
}//========================================================================================================
}// ifc=======================================
<|start_filename|>package.json<|end_filename|>
{
"name": "stylis-plugin-rtl",
"version": "2.1.1",
"description": "Fork of stylis-rtl, uses cssjanus under the hood to flip style orientations for RTL",
"module": "dist/stylis-rtl.js",
"main": "dist/cjs/stylis-rtl.js",
"types": "dist/stylis-rtl.d.ts",
"scripts": {
"build": "tsc && tsc --module commonjs --outDir dist/cjs",
"prepublishOnly": "yarn build",
"test": "jest"
},
"files": [
"dist",
"types"
],
"repository": {
"type": "git",
"url": "git+https://github.com/styled-components/stylis-plugin-rtl.git"
},
"keywords": [
"rtl",
"bidi",
"stylis",
"emotion",
"emotion-js",
"css",
"preprocessor",
"styled-components"
],
"author": "<NAME>",
"contributors": [
"<NAME> <<EMAIL>> (https://probablyup.com)"
],
"license": "MIT",
"bugs": {
"url": "https://github.com/styled-components/stylis-plugin-rtl/issues"
},
"homepage": "https://github.com/styled-components/stylis-plugin-rtl#readme",
"peerDependencies": {
"stylis": "4.x"
},
"devDependencies": {
"@types/jest": "^27.0.2",
"@types/stylis": "^4.0.2",
"jest": "^27.3.1",
"prettier": "^2.4.1",
"stylis": "^4.0.2",
"ts-jest": "^27.0.7",
"typescript": "^4.4.4"
},
"dependencies": {
"cssjanus": "^2.0.1"
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Adding Shape to Presentation/Aspose.Slides/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose.Slides
{
class Program
{
static void Main(string[] args)
{
//Instantiate Presentation class that represents the PPTX
Presentation pres = new Presentation();
//Get the first slide
ISlide slide = pres.Slides[0];
//Add an autoshape of type line
slide.Shapes.AddAutoShape(ShapeType.Line, 50, 150, 300, 0);
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Create a presentation document/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Create a presentation document.pptx";
CreatePresentation(FileName);
}
public static void CreatePresentation(string filepath)
{
//Instantiate a Presentation object that represents a PPT file
using (Presentation pres = new Presentation())
{
//Get the slide collection of the presentation
ISlideCollection slds = pres.Slides;
//Add an empty slide to the slide collection
slds.AddEmptySlide(pres.LayoutSlides[0]);
//Save your presentation to a file
pres.Save(filepath,Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Examples/CSharp/Charts/SetMarkerOptions.cs<|end_filename|>
using System;
using System.Drawing;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class SetMarkerOptions
{
public static void Run()
{
//ExStart:SetMarkerOptions
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create an instance of Presentation class
Presentation presentation = new Presentation();
ISlide slide = presentation.Slides[0];
// Creating the default chart
IChart chart = slide.Shapes.AddChart(ChartType.LineWithMarkers, 0, 0, 400, 400);
// Getting the default chart data worksheet index
int defaultWorksheetIndex = 0;
// Getting the chart data worksheet
IChartDataWorkbook fact = chart.ChartData.ChartDataWorkbook;
// Delete demo series
chart.ChartData.Series.Clear();
// Add new series
chart.ChartData.Series.Add(fact.GetCell(defaultWorksheetIndex, 1, 1, "Series 1"), chart.Type);
// Set the picture
System.Drawing.Image image1 = (System.Drawing.Image)new Bitmap(dataDir + "aspose-logo.jpg");
IPPImage imgx1 = presentation.Images.AddImage(image1);
// Set the picture
System.Drawing.Image image2 = (System.Drawing.Image)new Bitmap(dataDir + "Tulips.jpg");
IPPImage imgx2 = presentation.Images.AddImage(image2);
// Take first chart series
IChartSeries series = chart.ChartData.Series[0];
// Add new point (1:3) there.
IChartDataPoint point = series.DataPoints.AddDataPointForLineSeries(fact.GetCell(defaultWorksheetIndex, 1, 1, (double)4.5));
point.Marker.Format.Fill.FillType = FillType.Picture;
point.Marker.Format.Fill.PictureFillFormat.Picture.Image = imgx1;
point = series.DataPoints.AddDataPointForLineSeries(fact.GetCell(defaultWorksheetIndex, 2, 1, (double)2.5));
point.Marker.Format.Fill.FillType = FillType.Picture;
point.Marker.Format.Fill.PictureFillFormat.Picture.Image = imgx2;
point = series.DataPoints.AddDataPointForLineSeries(fact.GetCell(defaultWorksheetIndex, 3, 1, (double)3.5));
point.Marker.Format.Fill.FillType = FillType.Picture;
point.Marker.Format.Fill.PictureFillFormat.Picture.Image = imgx1;
point = series.DataPoints.AddDataPointForLineSeries(fact.GetCell(defaultWorksheetIndex, 4, 1, (double)4.5));
point.Marker.Format.Fill.FillType = FillType.Picture;
point.Marker.Format.Fill.PictureFillFormat.Picture.Image = imgx2;
// Changing the chart series marker
series.Marker.Size = 15;
// Write presentation to disk
presentation.Save(dataDir + "MarkOptions_out.pptx", SaveFormat.Pptx);
//ExEnd:SetMarkerOptions
}
}
}
<|start_filename|>Plugins/OpenXML/Missing Features/Print Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
PrintByDefaultPrinter();
PrintBySpecificPrinter();
}
public static void PrintByDefaultPrinter()
{
string MyDir = @"..\..\..\Sample Files\";
//Load the presentation
Presentation asposePresentation = new Presentation(MyDir + "Print.pptx");
//Call the print method to print the whole presentation on the default printer
asposePresentation.Print();
}
public static void PrintBySpecificPrinter()
{
string MyDir = @"..\..\..\Sample Files\";
//Load the presentation
Presentation asposePresentation = new Presentation(MyDir + "Print.pptx");
//Call the print method to print the whole presentation on the desired printer
asposePresentation.Print("LaserJet1100");
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Properties/AddCustomDocumentProperties.cs<|end_filename|>
using System;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Properties
{
class AddCustomDocumentProperties
{
public static void Run()
{
//ExStart:AddCustomDocumentProperties
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationProperties();
// Instantiate the Presentation class
Presentation presentation = new Presentation();
// Getting Document Properties
IDocumentProperties documentProperties = presentation.DocumentProperties;
// Adding Custom properties
documentProperties["New Custom"] = 12;
documentProperties["My Name"] = "Mudassir";
documentProperties["Custom"] = 124;
// Getting property name at particular index
String getPropertyName = documentProperties.GetCustomPropertyName(2);
// Removing selected property
documentProperties.RemoveCustomProperty(getPropertyName);
// Saving presentation
presentation.Save(dataDir + "CustomDocumentProperties_out.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
//ExEnd:AddCustomDocumentProperties
}
}
}
<|start_filename|>Examples/CSharp/Shapes/ConnectShapeUsingConnectionSite.cs<|end_filename|>
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class ConnectShapeUsingConnectionSite
{
public static void Run()
{
//ExStart:ConnectShapeUsingConnectionSite
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Instantiate Presentation class that represents the PPTX file
using (Presentation presentation = new Presentation())
{
// Accessing shapes collection for selected slide
IShapeCollection shapes = presentation.Slides[0].Shapes;
// Adding connector shape to slide shape collection
IConnector connector = shapes.AddConnector(ShapeType.BentConnector3, 0, 0, 10, 10);
// Add autoshape Ellipse
IAutoShape ellipse = shapes.AddAutoShape(ShapeType.Ellipse, 0, 100, 100, 100);
// Add autoshape Rectangle
IAutoShape rectangle = shapes.AddAutoShape(ShapeType.Rectangle, 100, 200, 100, 100);
// Joining Shapes to connectors
connector.StartShapeConnectedTo = ellipse;
connector.EndShapeConnectedTo = rectangle;
// Setting the desired connection site index of Ellipse shape for connector to get connected
uint wantedIndex = 6;
// Checking if desired index is less than maximum site index count
if (ellipse.ConnectionSiteCount > wantedIndex)
{
// Setting the desired connection site for connector on Ellipse
connector.StartShapeConnectionSiteIndex = wantedIndex;
}
// Save presentation
presentation.Save(dataDir + "Connecting_Shape_on_desired_connection_site_out.pptx", SaveFormat.Pptx);
}
//ExEnd:ConnectShapeUsingConnectionSite
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Create and Embed an Excel Chart as an OLE Object/VSTO Slides/ThisAddIn.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using Office = Microsoft.Office.Core;
using pptNS = Microsoft.Office.Interop.PowerPoint;
using xlNS = Microsoft.Office.Interop.Excel;
namespace VSTO_Slides
{
public partial class ThisAddIn
{
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
CreateNewChartInExcel();
UseCopyPaste();
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
public void SetCellValue(xlNS.Worksheet targetSheet, string Cell, object Value)
{
targetSheet.get_Range(Cell, Cell).set_Value(xlNS.XlRangeValueDataType.xlRangeValueDefault, Value);
}
public void CreateNewChartInExcel()
{
// Declare a variable for the Excel ApplicationClass instance.
Microsoft.Office.Interop.Excel.Application excelApplication = new xlNS.Application();//new Microsoft.Office.Interop.Excel.ApplicationClass();
// Declare variables for the Workbooks.Open method parameters.
string paramWorkbookPath = System.Windows.Forms.Application.StartupPath + @"\ChartData.xlsx";
object paramMissing = Type.Missing;
// Declare variables for the Chart.ChartWizard method.
object paramChartFormat = 1;
object paramCategoryLabels = 0;
object paramSeriesLabels = 0;
bool paramHasLegend = true;
object paramTitle = "Sales by Quarter";
object paramCategoryTitle = "Fiscal Quarter";
object paramValueTitle = "Billions";
try
{
// Create an instance of the Excel ApplicationClass object.
// excelApplication = new Microsoft.Office.Interop.Excel.ApplicationClass();
// Create a new workbook with 1 sheet in it.
xlNS.Workbook newWorkbook = excelApplication.Workbooks.Add(xlNS.XlWBATemplate.xlWBATWorksheet);
// Change the name of the sheet.
xlNS.Worksheet targetSheet = (xlNS.Worksheet)(newWorkbook.Worksheets[1]);
targetSheet.Name = "Quarterly Sales";
// Insert some data for the chart into the sheet.
// A B C D E
// 1 Q1 Q2 Q3 Q4
// 2 N. America 1.5 2 1.5 2.5
// 3 S. America 2 1.75 2 2
// 4 Europe 2.25 2 2.5 2
// 5 Asia 2.5 2.5 2 2.75
SetCellValue(targetSheet, "A2", "N. America");
SetCellValue(targetSheet, "A3", "S. America");
SetCellValue(targetSheet, "A4", "Europe");
SetCellValue(targetSheet, "A5", "Asia");
SetCellValue(targetSheet, "B1", "Q1");
SetCellValue(targetSheet, "B2", 1.5);
SetCellValue(targetSheet, "B3", 2);
SetCellValue(targetSheet, "B4", 2.25);
SetCellValue(targetSheet, "B5", 2.5);
SetCellValue(targetSheet, "C1", "Q2");
SetCellValue(targetSheet, "C2", 2);
SetCellValue(targetSheet, "C3", 1.75);
SetCellValue(targetSheet, "C4", 2);
SetCellValue(targetSheet, "C5", 2.5);
SetCellValue(targetSheet, "D1", "Q3");
SetCellValue(targetSheet, "D2", 1.5);
SetCellValue(targetSheet, "D3", 2);
SetCellValue(targetSheet, "D4", 2.5);
SetCellValue(targetSheet, "D5", 2);
SetCellValue(targetSheet, "E1", "Q4");
SetCellValue(targetSheet, "E2", 2.5);
SetCellValue(targetSheet, "E3", 2);
SetCellValue(targetSheet, "E4", 2);
SetCellValue(targetSheet, "E5", 2.75);
// Get the range holding the chart data.
xlNS.Range dataRange = targetSheet.get_Range("A1", "E5");
// Get the ChartObjects collection for the sheet.
xlNS.ChartObjects chartObjects = (xlNS.ChartObjects)(targetSheet.ChartObjects(paramMissing));
// Add a Chart to the collection.
xlNS.ChartObject newChartObject = chartObjects.Add(0, 100, 600, 300);
newChartObject.Name = "Sales Chart";
// Create a new chart of the data.
newChartObject.Chart.ChartWizard(dataRange, xlNS.XlChartType.xl3DColumn, paramChartFormat, xlNS.XlRowCol.xlRows,
paramCategoryLabels, paramSeriesLabels, paramHasLegend, paramTitle, paramCategoryTitle, paramValueTitle, paramMissing);
// Save the workbook.
newWorkbook.SaveAs(paramWorkbookPath, paramMissing, paramMissing, paramMissing, paramMissing,
paramMissing, xlNS.XlSaveAsAccessMode.xlNoChange, paramMissing, paramMissing, paramMissing, paramMissing, paramMissing);
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
finally
{
if (excelApplication != null)
{
// Close Excel.
excelApplication.Quit();
}
}
}
public void UseCopyPaste()
{
// Declare variables to hold references to PowerPoint objects.
pptNS.Application powerpointApplication = null;
pptNS.Presentation pptPresentation = null;
pptNS.Slide pptSlide = null;
pptNS.ShapeRange shapeRange = null;
// Declare variables to hold references to Excel objects.
xlNS.Application excelApplication = null;
xlNS.Workbook excelWorkBook = null;
xlNS.Worksheet targetSheet = null;
xlNS.ChartObjects chartObjects = null;
xlNS.ChartObject existingChartObject = null;
string paramPresentationPath = System.Windows.Forms.Application.StartupPath + @"\ChartTest.pptx";
string paramWorkbookPath = System.Windows.Forms.Application.StartupPath + @"\ChartData.xlsx";
object paramMissing = Type.Missing;
try
{
// Create an instance of PowerPoint.
powerpointApplication = new pptNS.Application();
// Create an instance of Excel.
excelApplication = new xlNS.Application();
// Open the Excel workbook containing the worksheet with the chart data.
excelWorkBook = excelApplication.Workbooks.Open(paramWorkbookPath,
paramMissing, paramMissing, paramMissing, paramMissing, paramMissing,
paramMissing, paramMissing, paramMissing, paramMissing, paramMissing,
paramMissing, paramMissing, paramMissing, paramMissing);
// Get the worksheet that contains the chart.
targetSheet =
(xlNS.Worksheet)(excelWorkBook.Worksheets["Quarterly Sales"]);
// Get the ChartObjects collection for the sheet.
chartObjects =
(xlNS.ChartObjects)(targetSheet.ChartObjects(paramMissing));
// Get the chart to copy.
existingChartObject =
(xlNS.ChartObject)(chartObjects.Item("Sales Chart"));
// Create a PowerPoint presentation.
pptPresentation =
powerpointApplication.Presentations.Add(
Microsoft.Office.Core.MsoTriState.msoTrue);
// Add a blank slide to the presentation.
pptSlide =
pptPresentation.Slides.Add(1, pptNS.PpSlideLayout.ppLayoutBlank);
// Copy the chart from the Excel worksheet to the clipboard.
existingChartObject.Copy();
// Paste the chart into the PowerPoint presentation.
shapeRange = pptSlide.Shapes.Paste();
// Position the chart on the slide.
shapeRange.Left = 60;
shapeRange.Top = 100;
// Save the presentation.
pptPresentation.SaveAs(paramPresentationPath, pptNS.PpSaveAsFileType.ppSaveAsOpenXMLPresentation, Microsoft.Office.Core.MsoTriState.msoTrue);
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
finally
{
// Release the PowerPoint slide object.
shapeRange = null;
pptSlide = null;
// Close and release the Presentation object.
if (pptPresentation != null)
{
pptPresentation.Close();
pptPresentation = null;
}
// Quit PowerPoint and release the ApplicationClass object.
if (powerpointApplication != null)
{
powerpointApplication.Quit();
powerpointApplication = null;
}
// Release the Excel objects.
targetSheet = null;
chartObjects = null;
existingChartObject = null;
// Close and release the Excel Workbook object.
if (excelWorkBook != null)
{
excelWorkBook.Close(false, paramMissing, paramMissing);
excelWorkBook = null;
}
// Quit Excel and release the ApplicationClass object.
if (excelApplication != null)
{
excelApplication.Quit();
excelApplication = null;
}
GC.Collect();
GC.WaitForPendingFinalizers();
GC.Collect();
GC.WaitForPendingFinalizers();
}
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Add a comment to a slide/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
using System;
using System.Drawing;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Add a comment to a slide.pptx";
using (Presentation pres = new Presentation())
{
//Adding Empty slide
pres.Slides.AddEmptySlide(pres.LayoutSlides[0]);
//Adding Author
ICommentAuthor author = pres.CommentAuthors.AddAuthor("Zeeshan", "MZ");
//Position of comments
PointF point = new PointF();
point.X = 1;
point.Y = 1;
//Adding slide comment for an author on slide
author.Comments.AddComment("Hello Zeeshan, this is slide comment", pres.Slides[0], point, DateTime.Now);
pres.Save(FileName, Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Examples/CSharp/Charts/AnimatingCategoriesElements.cs<|end_filename|>
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides.Animation;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class AnimatingCategoriesElements
{
public static void Run()
{
//ExStart:AnimatingCategoriesElements
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
using (Presentation presentation = new Presentation(dataDir + "ExistingChart.pptx"))
{
// Get reference of the chart object
var slide = presentation.Slides[0] as Slide;
var shapes = slide.Shapes as ShapeCollection;
var chart = shapes[0] as IChart;
// Animate categories' elements
slide.Timeline.MainSequence.AddEffect(chart, EffectType.Fade, EffectSubtype.None, EffectTriggerType.AfterPrevious);
// Appear effect for every chart element: series 0..2, element-in-category 0..3
for (int seriesIndex = 0; seriesIndex < 3; seriesIndex++)
{
    for (int elementIndex = 0; elementIndex < 4; elementIndex++)
    {
        ((Sequence)slide.Timeline.MainSequence).AddEffect(chart, EffectChartMinorGroupingType.ByElementInCategory, seriesIndex, elementIndex, EffectType.Appear, EffectSubtype.None, EffectTriggerType.AfterPrevious);
    }
}
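// Series-level animation is also possible via the major grouping overload
// (a sketch, assuming the BySeries overload of AddEffect):
// ((Sequence)slide.Timeline.MainSequence).AddEffect(chart,
//     EffectChartMajorGroupingType.BySeries, 0, EffectType.Appear,
//     EffectSubtype.None, EffectTriggerType.AfterPrevious);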
// Write the presentation file to disk
presentation.Save(dataDir + "AnimatingCategoriesElements_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AnimatingCategoriesElements
}
}
}
<|start_filename|>Examples/CSharp/Slides/CRUD/RemoveSlideUsingIndex.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.CRUD
{
public class RemoveSlideUsingIndex
{
public static void Run()
{
//ExStart:RemoveSlideUsingIndex
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_CRUD();
// Instantiate a Presentation object that represents a presentation file
using (Presentation pres = new Presentation(dataDir + "RemoveSlideUsingIndex.pptx"))
{
// Removing a slide using its slide index
pres.Slides.RemoveAt(0);
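// A slide can also be removed by reference rather than by index (sketch):
// ISlide slide = pres.Slides[0];
// pres.Slides.Remove(slide);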
// Writing the presentation file
pres.Save(dataDir + "modified_out.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
}
//ExEnd:RemoveSlideUsingIndex
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Get all the text in all slides/OpenXML Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml;
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using A = DocumentFormat.OpenXml.Drawing;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Get all the text in a slide.pptx";
int numberOfSlides = CountSlides(FileName);
System.Console.WriteLine("Number of slides = {0}", numberOfSlides);
string slideText;
for (int i = 0; i < numberOfSlides; i++)
{
GetSlideIdAndText(out slideText, FileName, i);
System.Console.WriteLine("Slide #{0} contains: {1}", i + 1, slideText);
}
System.Console.ReadKey();
}
public static int CountSlides(string presentationFile)
{
// Open the presentation as read-only.
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, false))
{
// Pass the presentation to the next CountSlides method
// and return the slide count.
return CountSlides(presentationDocument);
}
}
// Count the slides in the presentation.
public static int CountSlides(PresentationDocument presentationDocument)
{
// Check for a null document object.
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
int slidesCount = 0;
// Get the presentation part of document.
PresentationPart presentationPart = presentationDocument.PresentationPart;
// Get the slide count from the SlideParts.
if (presentationPart != null)
{
slidesCount = presentationPart.SlideParts.Count();
}
// Return the slide count to the previous method.
return slidesCount;
}
public static void GetSlideIdAndText(out string sldText, string docName, int index)
{
using (PresentationDocument ppt = PresentationDocument.Open(docName, false))
{
// Get the relationship ID of the first slide.
PresentationPart part = ppt.PresentationPart;
OpenXmlElementList slideIds = part.Presentation.SlideIdList.ChildElements;
string relId = (slideIds[index] as SlideId).RelationshipId;
// Get the slide part from the relationship ID.
SlidePart slide = (SlidePart)part.GetPartById(relId);
// Build a StringBuilder object.
StringBuilder paragraphText = new StringBuilder();
// Get the inner text of the slide:
IEnumerable<A.Text> texts = slide.Slide.Descendants<A.Text>();
foreach (A.Text text in texts)
{
paragraphText.Append(text.Text);
}
sldText = paragraphText.ToString();
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Converting to XPS/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
namespace Converting_to_XPS
{
class Program
{
static void Main(string[] args)
{
string MyDir = @"Files\";
//Instantiate a Presentation object that represents a presentation file
Presentation pres = new Presentation(MyDir + "Conversion.ppt");
//Saving the presentation as an XPS document
pres.Save(MyDir + "converted.xps", Aspose.Slides.Export.SaveFormat.Xps);
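//The export can be tuned through XpsOptions (a sketch, assuming default option values):
//Aspose.Slides.Export.XpsOptions opts = new Aspose.Slides.Export.XpsOptions();
//opts.SaveMetafilesAsPng = true;
//pres.Save(MyDir + "converted.xps", Aspose.Slides.Export.SaveFormat.Xps, opts);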
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Removing Row Or Column in Table/Aspose.Slides/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose.Slides
{
class Program
{
static void Main(string[] args)
{
string FileName = @"E:\Aspose\Aspose Vs VSTO\Aspose.Slides Vs VSTO Presentations v 1.1\Sample Files\Removing Row Or Column in Table.pptx";
Presentation MyPresentation = new Presentation(FileName);
//Get First Slide
ISlide sld = MyPresentation.Slides[0];
foreach (IShape shp in sld.Shapes)
if (shp is ITable)
{
ITable tbl = (ITable)shp;
tbl.Rows.RemoveAt(0, false);
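//Columns can be removed the same way (sketch, assuming the column overload mirrors Rows.RemoveAt):
//tbl.Columns.RemoveAt(0, false);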
}
MyPresentation.Save(FileName,Export.SaveFormat.Pptx);
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Conversion/ExportMediaFilestohtml.cs<|end_filename|>
using System.IO;
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Conversion
{
class ExportMediaFilestohtml
{
public static void Run()
{
//ExStart:ExportMediaFilestohtml
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Conversion();
// Loading a presentation
using (Presentation pres = new Presentation(dataDir + "Media File.pptx"))
{
string path = dataDir;
const string fileName = "ExportMediaFiles_out.html";
const string baseUri = "http://www.example.com/";
VideoPlayerHtmlController controller = new VideoPlayerHtmlController(path, fileName, baseUri);
// Setting HTML options
HtmlOptions htmlOptions = new HtmlOptions(controller);
SVGOptions svgOptions = new SVGOptions(controller);
htmlOptions.HtmlFormatter = HtmlFormatter.CreateCustomFormatter(controller);
htmlOptions.SlideImageFormat = SlideImageFormat.Svg(svgOptions);
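// A raster alternative is also possible (a sketch, assuming the Bitmap factory overload):
// htmlOptions.SlideImageFormat = SlideImageFormat.Bitmap(1, System.Drawing.Imaging.ImageFormat.Jpeg);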
// Saving the file
pres.Save(Path.Combine(path, fileName), SaveFormat.Html, htmlOptions);
}
//ExEnd:ExportMediaFilestohtml
}
}
}
<|start_filename|>Examples/CSharp/Slides/Layout/AddLayoutSlides.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.Layout
{
class AddLayoutSlides
{
public static void Run()
{
//ExStart:AddLayoutSlides
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_Layout();
// Instantiate Presentation class that represents the presentation file
using (Presentation presentation = new Presentation(dataDir + "AccessSlides.pptx"))
{
// Try to search by layout slide type
IMasterLayoutSlideCollection layoutSlides = presentation.Masters[0].LayoutSlides;
ILayoutSlide layoutSlide = layoutSlides.GetByType(SlideLayoutType.TitleAndObject) ?? layoutSlides.GetByType(SlideLayoutType.Title);
if (layoutSlide == null)
{
// Handle the case where the presentation doesn't contain a layout of the requested type.
// Some presentation files contain only the Blank and Custom layout types, but layout
// slides of the Custom type still carry distinct names such as "Title" or
// "Title and Content", so those names can be used to select a layout slide.
// The set of placeholder shape types can also be used; for example, a title
// layout should contain only a Title placeholder.
foreach (ILayoutSlide titleAndObjectLayoutSlide in layoutSlides)
{
if (titleAndObjectLayoutSlide.Name == "Title and Object")
{
layoutSlide = titleAndObjectLayoutSlide;
break;
}
}
if (layoutSlide == null)
{
foreach (ILayoutSlide titleLayoutSlide in layoutSlides)
{
if (titleLayoutSlide.Name == "Title")
{
layoutSlide = titleLayoutSlide;
break;
}
}
if (layoutSlide == null)
{
layoutSlide = layoutSlides.GetByType(SlideLayoutType.Blank);
if (layoutSlide == null)
{
layoutSlide = layoutSlides.Add(SlideLayoutType.TitleAndObject, "Title and Object");
}
}
}
}
// Adding an empty slide with the selected layout slide
presentation.Slides.InsertEmptySlide(0, layoutSlide);
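// AddEmptySlide appends to the end of the collection instead of inserting at an index (sketch):
// presentation.Slides.AddEmptySlide(layoutSlide);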
// Save presentation
presentation.Save(dataDir + "AddLayoutSlides_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AddLayoutSlides
}
}
}
<|start_filename|>Examples/CSharp/Shapes/FillShapesGradient.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class FillShapesGradient
{
public static void Run()
{
//ExStart:FillShapesGradient
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX file
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of ellipse type
IShape shp = sld.Shapes.AddAutoShape(ShapeType.Ellipse, 50, 150, 75, 150);
// Apply some gradient formatting to ellipse shape
shp.FillFormat.FillType = FillType.Gradient;
shp.FillFormat.GradientFormat.GradientShape = GradientShape.Linear;
// Set the Gradient Direction
shp.FillFormat.GradientFormat.GradientDirection = GradientDirection.FromCorner2;
// Add two gradient stops
shp.FillFormat.GradientFormat.GradientStops.Add((float)1.0, PresetColor.Purple);
shp.FillFormat.GradientFormat.GradientStops.Add((float)0, PresetColor.Red);
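// Gradient stops also accept explicit System.Drawing colors
// (a sketch; requires "using System.Drawing;"):
// shp.FillFormat.GradientFormat.GradientStops.Add(0.5f, Color.Red);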
//Write the PPTX file to disk
pres.Save(dataDir + "EllipseShpGrad_out.pptx", SaveFormat.Pptx);
}
//ExEnd:FillShapesGradient
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Adding Slide to Presentation/OpenXML Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using Drawing = DocumentFormat.OpenXml.Drawing;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Adding Slide to Presentation.pptx";
InsertNewSlide(FileName, 1, "My new slide");
}
// Insert a slide into the specified presentation.
public static void InsertNewSlide(string presentationFile, int position, string slideTitle)
{
// Open the source document as read/write.
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, true))
{
// Pass the source document and the position and title of the slide to be inserted to the next method.
InsertNewSlide(presentationDocument, position, slideTitle);
}
}
// Insert the specified slide into the presentation at the specified position.
public static void InsertNewSlide(PresentationDocument presentationDocument, int position, string slideTitle)
{
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
if (slideTitle == null)
{
throw new ArgumentNullException("slideTitle");
}
PresentationPart presentationPart = presentationDocument.PresentationPart;
// Verify that the presentation is not empty.
if (presentationPart == null)
{
throw new InvalidOperationException("The presentation document is empty.");
}
// Declare and instantiate a new slide.
Slide slide = new Slide(new CommonSlideData(new ShapeTree()));
uint drawingObjectId = 1;
// Construct the slide content.
// Specify the non-visual properties of the new slide.
NonVisualGroupShapeProperties nonVisualProperties = slide.CommonSlideData.ShapeTree.AppendChild(new NonVisualGroupShapeProperties());
nonVisualProperties.NonVisualDrawingProperties = new NonVisualDrawingProperties() { Id = 1, Name = "" };
nonVisualProperties.NonVisualGroupShapeDrawingProperties = new NonVisualGroupShapeDrawingProperties();
nonVisualProperties.ApplicationNonVisualDrawingProperties = new ApplicationNonVisualDrawingProperties();
// Specify the group shape properties of the new slide.
slide.CommonSlideData.ShapeTree.AppendChild(new GroupShapeProperties());
// Declare and instantiate the title shape of the new slide.
Shape titleShape = slide.CommonSlideData.ShapeTree.AppendChild(new Shape());
drawingObjectId++;
// Specify the required shape properties for the title shape.
titleShape.NonVisualShapeProperties = new NonVisualShapeProperties
(new NonVisualDrawingProperties() { Id = drawingObjectId, Name = "Title" },
new NonVisualShapeDrawingProperties(new Drawing.ShapeLocks() { NoGrouping = true }),
new ApplicationNonVisualDrawingProperties(new PlaceholderShape() { Type = PlaceholderValues.Title }));
titleShape.ShapeProperties = new ShapeProperties();
// Specify the text of the title shape.
titleShape.TextBody = new TextBody(new Drawing.BodyProperties(),
new Drawing.ListStyle(),
new Drawing.Paragraph(new Drawing.Run(new Drawing.Text() { Text = slideTitle })));
// Declare and instantiate the body shape of the new slide.
Shape bodyShape = slide.CommonSlideData.ShapeTree.AppendChild(new Shape());
drawingObjectId++;
// Specify the required shape properties for the body shape.
bodyShape.NonVisualShapeProperties = new NonVisualShapeProperties(new NonVisualDrawingProperties() { Id = drawingObjectId, Name = "Content Placeholder" },
new NonVisualShapeDrawingProperties(new Drawing.ShapeLocks() { NoGrouping = true }),
new ApplicationNonVisualDrawingProperties(new PlaceholderShape() { Index = 1 }));
bodyShape.ShapeProperties = new ShapeProperties();
// Specify the text of the body shape.
bodyShape.TextBody = new TextBody(new Drawing.BodyProperties(),
new Drawing.ListStyle(),
new Drawing.Paragraph());
// Create the slide part for the new slide.
SlidePart slidePart = presentationPart.AddNewPart<SlidePart>();
// Save the new slide part.
slide.Save(slidePart);
// Modify the slide ID list in the presentation part.
// The slide ID list should not be null.
SlideIdList slideIdList = presentationPart.Presentation.SlideIdList;
// Find the highest slide ID in the current list.
uint maxSlideId = 1;
SlideId prevSlideId = null;
foreach (SlideId slideId in slideIdList.ChildElements)
{
if (slideId.Id > maxSlideId)
{
maxSlideId = slideId.Id;
}
position--;
if (position == 0)
{
prevSlideId = slideId;
}
}
maxSlideId++;
// Get the ID of the previous slide.
SlidePart lastSlidePart;
if (prevSlideId != null)
{
lastSlidePart = (SlidePart)presentationPart.GetPartById(prevSlideId.RelationshipId);
}
else
{
lastSlidePart = (SlidePart)presentationPart.GetPartById(((SlideId)(slideIdList.ChildElements[0])).RelationshipId);
}
// Use the same slide layout as that of the previous slide.
if (null != lastSlidePart.SlideLayoutPart)
{
slidePart.AddPart(lastSlidePart.SlideLayoutPart);
}
// Insert the new slide into the slide list after the previous slide.
SlideId newSlideId = slideIdList.InsertAfter(new SlideId(), prevSlideId);
newSlideId.Id = maxSlideId;
newSlideId.RelationshipId = presentationPart.GetIdOfPart(slidePart);
// Save the modified presentation.
presentationPart.Presentation.Save();
}
}
}
<|start_filename|>Examples/CSharp/Charts/NormalCharts.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Charts;
using System.Drawing;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class NormalCharts
{
public static void Run()
{
//ExStart:NormalCharts
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents PPTX file
Presentation pres = new Presentation();
// Access first slide
ISlide sld = pres.Slides[0];
// Add chart with default data
IChart chart = sld.Shapes.AddChart(ChartType.ClusteredColumn, 0, 0, 500, 500);
// Setting chart Title
// Chart.ChartTitle.TextFrameForOverriding.Text = "Sample Title";
chart.ChartTitle.AddTextFrameForOverriding("Sample Title");
chart.ChartTitle.TextFrameForOverriding.TextFrameFormat.CenterText = NullableBool.True;
chart.ChartTitle.Height = 20;
chart.HasTitle = true;
// Set first series to Show Values
chart.ChartData.Series[0].Labels.DefaultDataLabelFormat.ShowValue = true;
// Setting the index of chart data sheet
int defaultWorksheetIndex = 0;
// Getting the chart data worksheet
IChartDataWorkbook fact = chart.ChartData.ChartDataWorkbook;
// Delete default generated series and categories
chart.ChartData.Series.Clear();
chart.ChartData.Categories.Clear();
// After Clear() both chart.ChartData.Series.Count and chart.ChartData.Categories.Count are 0
// Adding new series
chart.ChartData.Series.Add(fact.GetCell(defaultWorksheetIndex, 0, 1, "Series 1"), chart.Type);
chart.ChartData.Series.Add(fact.GetCell(defaultWorksheetIndex, 0, 2, "Series 2"), chart.Type);
// Adding new categories
chart.ChartData.Categories.Add(fact.GetCell(defaultWorksheetIndex, 1, 0, "Caetegoty 1"));
chart.ChartData.Categories.Add(fact.GetCell(defaultWorksheetIndex, 2, 0, "Caetegoty 2"));
chart.ChartData.Categories.Add(fact.GetCell(defaultWorksheetIndex, 3, 0, "Caetegoty 3"));
// Take first chart series
IChartSeries series = chart.ChartData.Series[0];
// Now populating series data
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 1, 1, 20));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 2, 1, 50));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 3, 1, 30));
// Setting fill color for series
series.Format.Fill.FillType = FillType.Solid;
series.Format.Fill.SolidFillColor.Color = Color.Red;
// Take second chart series
series = chart.ChartData.Series[1];
// Now populating series data
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 1, 2, 30));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 2, 2, 10));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 3, 2, 60));
// Setting fill color for series
series.Format.Fill.FillType = FillType.Solid;
series.Format.Fill.SolidFillColor.Color = Color.Green;
// First label will show the category name
IDataLabel lbl = series.DataPoints[0].Label;
lbl.DataLabelFormat.ShowCategoryName = true;
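// Second label will show the series name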
lbl = series.DataPoints[1].Label;
lbl.DataLabelFormat.ShowSeriesName = true;
// Show value for third label
lbl = series.DataPoints[2].Label;
lbl.DataLabelFormat.ShowValue = true;
lbl.DataLabelFormat.ShowSeriesName = true;
lbl.DataLabelFormat.Separator = "/";
// Save presentation with chart
pres.Save(dataDir + "AsposeChart_out.pptx", SaveFormat.Pptx);
//ExEnd:NormalCharts
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Format Text using VSTO and Aspose.Slides/Aspose Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
//Open the presentation
Presentation pres = new Presentation("source.ppt");
//Add Verdana font
FontEntity font = pres.Fonts[0];
FontEntity verdanaFont = new FontEntity(pres, font);
verdanaFont.FontName = "Verdana";
int verdanaFontIndex = pres.Fonts.Add(verdanaFont);
//Access the first slide
Slide slide = pres.GetSlideByPosition(1);
//Access the third shape
Shape shp = slide.Shapes[2];
//Change its text's font to Verdana and height to 32
TextFrame tf = shp.TextFrame;
Paragraph para = tf.Paragraphs[0];
Portion port = para.Portions[0];
port.FontIndex = verdanaFontIndex;
port.FontHeight = 32;
//Make it bold
port.FontBold = true;
//Italicize it
port.FontItalic = true;
//Change text color
port.FontColor = Color.FromArgb(0x33, 0x33, 0xCC);
//Change shape background color
shp.FillFormat.Type = FillType.Solid;
shp.FillFormat.ForeColor = Color.FromArgb(0xCC, 0xCC, 0xFF);
//Write the output to disk
pres.Write("outAspose.ppt");
}
}
}
<|start_filename|>Examples/CSharp/Charts/ChartEntities.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using System.Drawing;
using Aspose.Slides.Export;
using Aspose.Slides.Charts;
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class ChartEntities
{
public static void Run()
{
//ExStart:ChartEntities
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiating a Presentation object
Presentation pres = new Presentation();
// Accessing the first slide
ISlide slide = pres.Slides[0];
// Adding the sample chart
IChart chart = slide.Shapes.AddChart(ChartType.LineWithMarkers, 50, 50, 500, 400);
// Setting chart title
chart.HasTitle = true;
chart.ChartTitle.AddTextFrameForOverriding("");
IPortion chartTitle = chart.ChartTitle.TextFrameForOverriding.Paragraphs[0].Portions[0];
chartTitle.Text = "Sample Chart";
chartTitle.PortionFormat.FillFormat.FillType = FillType.Solid;
chartTitle.PortionFormat.FillFormat.SolidFillColor.Color = Color.Gray;
chartTitle.PortionFormat.FontHeight = 20;
chartTitle.PortionFormat.FontBold = NullableBool.True;
chartTitle.PortionFormat.FontItalic = NullableBool.True;
// Setting Major grid lines format for value axis
chart.Axes.VerticalAxis.MajorGridLinesFormat.Line.FillFormat.FillType = FillType.Solid;
chart.Axes.VerticalAxis.MajorGridLinesFormat.Line.FillFormat.SolidFillColor.Color = Color.Blue;
chart.Axes.VerticalAxis.MajorGridLinesFormat.Line.Width = 5;
chart.Axes.VerticalAxis.MajorGridLinesFormat.Line.DashStyle = LineDashStyle.DashDot;
// Setting Minor grid lines format for value axis
chart.Axes.VerticalAxis.MinorGridLinesFormat.Line.FillFormat.FillType = FillType.Solid;
chart.Axes.VerticalAxis.MinorGridLinesFormat.Line.FillFormat.SolidFillColor.Color = Color.Red;
chart.Axes.VerticalAxis.MinorGridLinesFormat.Line.Width = 3;
// Setting value axis number format
chart.Axes.VerticalAxis.IsNumberFormatLinkedToSource = false;
chart.Axes.VerticalAxis.DisplayUnit = DisplayUnitType.Thousands;
chart.Axes.VerticalAxis.NumberFormat = "0.0%";
// Setting chart maximum, minimum values
chart.Axes.VerticalAxis.IsAutomaticMajorUnit = false;
chart.Axes.VerticalAxis.IsAutomaticMaxValue = false;
chart.Axes.VerticalAxis.IsAutomaticMinorUnit = false;
chart.Axes.VerticalAxis.IsAutomaticMinValue = false;
chart.Axes.VerticalAxis.MaxValue = 15f;
chart.Axes.VerticalAxis.MinValue = -2f;
chart.Axes.VerticalAxis.MinorUnit = 0.5f;
chart.Axes.VerticalAxis.MajorUnit = 2.0f;
// Setting Value Axis Text Properties
IChartPortionFormat txtVal = chart.Axes.VerticalAxis.TextFormat.PortionFormat;
txtVal.FontBold = NullableBool.True;
txtVal.FontHeight = 16;
txtVal.FontItalic = NullableBool.True;
txtVal.FillFormat.FillType = FillType.Solid;
txtVal.FillFormat.SolidFillColor.Color = Color.DarkGreen;
txtVal.LatinFont = new FontData("Times New Roman");
// Setting value axis title
chart.Axes.VerticalAxis.HasTitle = true;
chart.Axes.VerticalAxis.Title.AddTextFrameForOverriding("");
IPortion valtitle = chart.Axes.VerticalAxis.Title.TextFrameForOverriding.Paragraphs[0].Portions[0];
valtitle.Text = "Primary Axis";
valtitle.PortionFormat.FillFormat.FillType = FillType.Solid;
valtitle.PortionFormat.FillFormat.SolidFillColor.Color = Color.Gray;
valtitle.PortionFormat.FontHeight = 20;
valtitle.PortionFormat.FontBold = NullableBool.True;
valtitle.PortionFormat.FontItalic = NullableBool.True;
// Setting value axis line format (now obsolete):
// chart.Axes.VerticalAxis.AxisLine.Width = 10;
// chart.Axes.VerticalAxis.AxisLine.FillFormat.FillType = FillType.Solid;
// chart.Axes.VerticalAxis.AxisLine.FillFormat.SolidFillColor.Color = Color.Red;
// Setting Major grid lines format for Category axis
chart.Axes.HorizontalAxis.MajorGridLinesFormat.Line.FillFormat.FillType = FillType.Solid;
chart.Axes.HorizontalAxis.MajorGridLinesFormat.Line.FillFormat.SolidFillColor.Color = Color.Green;
chart.Axes.HorizontalAxis.MajorGridLinesFormat.Line.Width = 5;
// Setting Minor grid lines format for Category axis
chart.Axes.HorizontalAxis.MinorGridLinesFormat.Line.FillFormat.FillType = FillType.Solid;
chart.Axes.HorizontalAxis.MinorGridLinesFormat.Line.FillFormat.SolidFillColor.Color = Color.Yellow;
chart.Axes.HorizontalAxis.MinorGridLinesFormat.Line.Width = 3;
// Setting Category Axis Text Properties
IChartPortionFormat txtCat = chart.Axes.HorizontalAxis.TextFormat.PortionFormat;
txtCat.FontBold = NullableBool.True;
txtCat.FontHeight = 16;
txtCat.FontItalic = NullableBool.True;
txtCat.FillFormat.FillType = FillType.Solid;
txtCat.FillFormat.SolidFillColor.Color = Color.Blue;
txtCat.LatinFont = new FontData("Arial");
// Setting category axis title
chart.Axes.HorizontalAxis.HasTitle = true;
chart.Axes.HorizontalAxis.Title.AddTextFrameForOverriding("");
IPortion catTitle = chart.Axes.HorizontalAxis.Title.TextFrameForOverriding.Paragraphs[0].Portions[0];
catTitle.Text = "Sample Category";
catTitle.PortionFormat.FillFormat.FillType = FillType.Solid;
catTitle.PortionFormat.FillFormat.SolidFillColor.Color = Color.Gray;
catTitle.PortionFormat.FontHeight = 20;
catTitle.PortionFormat.FontBold = NullableBool.True;
catTitle.PortionFormat.FontItalic = NullableBool.True;
// Setting category axis label position
chart.Axes.HorizontalAxis.TickLabelPosition = TickLabelPositionType.Low;
// Setting category axis label rotation angle
chart.Axes.HorizontalAxis.TickLabelRotationAngle = 45;
// Setting Legends Text Properties
IChartPortionFormat txtleg = chart.Legend.TextFormat.PortionFormat;
txtleg.FontBold = NullableBool.True;
txtleg.FontHeight = 16;
txtleg.FontItalic = NullableBool.True;
txtleg.FillFormat.FillType = FillType.Solid;
txtleg.FillFormat.SolidFillColor.Color = Color.DarkRed;
// Set show chart legends without overlapping chart
chart.Legend.Overlay = true;
// Plotting first series on secondary value axis
// Chart.ChartData.Series[0].PlotOnSecondAxis = true;
// Setting chart back wall color
chart.BackWall.Thickness = 1;
chart.BackWall.Format.Fill.FillType = FillType.Solid;
chart.BackWall.Format.Fill.SolidFillColor.Color = Color.Orange;
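// Setting chart floor color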
chart.Floor.Format.Fill.FillType = FillType.Solid;
chart.Floor.Format.Fill.SolidFillColor.Color = Color.Red;
// Setting Plot area color
chart.PlotArea.Format.Fill.FillType = FillType.Solid;
chart.PlotArea.Format.Fill.SolidFillColor.Color = Color.LightCyan;
// Save Presentation
pres.Save(dataDir + "FormattedChart_out.pptx", SaveFormat.Pptx);
//ExEnd:ChartEntities
}
}
}
<|start_filename|>Plugins/OpenXML/Missing Features/Managing Slides Transitions/Program.cs<|end_filename|>
using Aspose.Slides;
using Aspose.Slides.Export;
using Aspose.Slides.SlideShow;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\Sample Files\";
string FileName = FilePath + "Managing Slides Transitions.pptx";
//Instantiate Presentation class that represents a presentation file
using (Presentation pres = new Presentation(FileName))
{
//Apply circle type transition on slide 1
pres.Slides[0].SlideShowTransition.Type = TransitionType.Circle;
//Apply comb type transition on slide 2
pres.Slides[1].SlideShowTransition.Type = TransitionType.Comb;
//Apply zoom type transition on slide 3
pres.Slides[2].SlideShowTransition.Type = TransitionType.Zoom;
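//Transition timing can be tuned as well (a sketch, assuming millisecond units for AdvanceAfterTime):
//pres.Slides[0].SlideShowTransition.AdvanceOnClick = true;
//pres.Slides[0].SlideShowTransition.AdvanceAfter = true;
//pres.Slides[0].SlideShowTransition.AdvanceAfterTime = 2000;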
//Write the presentation to disk
pres.Save(FileName, SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Get all the External Hyperlinks/OpenXML Presentation/Program.cs<|end_filename|>
using DocumentFormat.OpenXml.Packaging;
using System;
using System.Collections.Generic;
using Drawing = DocumentFormat.OpenXml.Drawing;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Get all the External Eyperlinks.pptx";
foreach (string s in GetAllExternalHyperlinksInPresentation(FileName))
Console.WriteLine(s);
}
// Returns all the external hyperlinks in the slides of a presentation.
public static IEnumerable<String> GetAllExternalHyperlinksInPresentation(string fileName)
{
// Declare a list of strings.
List<string> ret = new List<string>();
// Open the presentation file as read-only.
using (PresentationDocument document = PresentationDocument.Open(fileName, false))
{
// Iterate through all the slide parts in the presentation part.
foreach (SlidePart slidePart in document.PresentationPart.SlideParts)
{
IEnumerable<Drawing.HyperlinkType> links = slidePart.Slide.Descendants<Drawing.HyperlinkType>();
// Iterate through all the links in the slide part.
foreach (Drawing.HyperlinkType link in links)
{
// Iterate through all the external relationships in the slide part.
foreach (HyperlinkRelationship relation in slidePart.HyperlinkRelationships)
{
// If the relationship ID matches the link ID…
if (relation.Id.Equals(link.Id))
{
// Add the URI of the external relationship to the list of strings.
ret.Add(relation.Uri.AbsoluteUri);
}
}
}
}
}
// Return the list of strings.
return ret;
}
}
}
<|start_filename|>Examples/CSharp/Shapes/FindShapeInSlide.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using System;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class FindShapeInSlide
{
//ExStart:FindShapeInSlide
public static void Run()
{
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate a Presentation class that represents the presentation file
using (Presentation p = new Presentation(dataDir + "FindingShapeInSlide.pptx"))
{
ISlide slide = p.Slides[0];
// Alternative text of the shape to be found
IShape shape = FindShape(slide, "Shape1");
if (shape != null)
{
Console.WriteLine("Shape Name: " + shape.Name);
}
}
}
// Method implementation to find a shape in a slide using its alternative text
public static IShape FindShape(ISlide slide, string alttext)
{
// Iterating through all shapes inside the slide
for (int i = 0; i < slide.Shapes.Count; i++)
{
// If the alternative text of the slide matches with the required one then
// Return the shape
if (slide.Shapes[i].AlternativeText.CompareTo(alttext) == 0)
return slide.Shapes[i];
}
return null;
}
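// A LINQ equivalent of the loop above (sketch; requires "using System.Linq;"):
// return slide.Shapes.FirstOrDefault(s => s.AlternativeText == alttext);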
//ExEnd:FindShapeInSlide
}
}
<|start_filename|>Examples/CSharp/Presentations/Properties/AccessProperties.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations
{
public class AccessProperties
{
public static void Run()
{
//ExStart:AccessProperties
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationProperties();
// Accessing the Document Properties of a Password Protected Presentation without Password
// creating instance of load options to set the presentation access password
LoadOptions loadOptions = new LoadOptions();
// Setting the access password to null
loadOptions.Password = null;
// Setting the access to document properties
loadOptions.OnlyLoadDocumentProperties = true;
// Opening the presentation file by passing the file path and load options to the constructor of Presentation class
Presentation pres = new Presentation(dataDir + "AccessProperties.pptx", loadOptions);
// Getting Document Properties
IDocumentProperties docProps = pres.DocumentProperties;
System.Console.WriteLine("Name of Application : " + docProps.NameOfApplication);
//ExEnd:AccessProperties
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Move a slide to a new position/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Move a slide to a new position.pptx";
MoveSlide(FileName, 1, 2);
}
// Move a slide to a different position in the slide order in the presentation.
public static void MoveSlide(string presentationFile, int from, int to)
{
//Instantiate PresentationEx class to load the source PPTX file
using (Presentation pres = new Presentation(presentationFile))
{
//Get the slide whose position is to be changed
ISlide sld = pres.Slides[from];
ISlide sld2 = pres.Slides[to];
//Set the new position for the slide
sld2.SlideNumber = from;
sld.SlideNumber = to;
//Write the PPTX to disk
pres.Save(presentationFile,Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Examples/CSharp/Shapes/FormatJoinStyles.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using System.Drawing;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class FormatJoinStyles
{
public static void Run()
{
//ExStart:FormatJoinStyles
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add three autoshapes of rectangle type
IShape shp1 = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 100, 150, 75);
IShape shp2 = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 300, 100, 150, 75);
IShape shp3 = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 250, 150, 75);
// Set the fill color of the rectangle shape
shp1.FillFormat.FillType = FillType.Solid;
shp1.FillFormat.SolidFillColor.Color = Color.Black;
shp2.FillFormat.FillType = FillType.Solid;
shp2.FillFormat.SolidFillColor.Color = Color.Black;
shp3.FillFormat.FillType = FillType.Solid;
shp3.FillFormat.SolidFillColor.Color = Color.Black;
// Set the line width
shp1.LineFormat.Width = 15;
shp2.LineFormat.Width = 15;
shp3.LineFormat.Width = 15;
// Set the color of the line of rectangle
shp1.LineFormat.FillFormat.FillType = FillType.Solid;
shp1.LineFormat.FillFormat.SolidFillColor.Color = Color.Blue;
shp2.LineFormat.FillFormat.FillType = FillType.Solid;
shp2.LineFormat.FillFormat.SolidFillColor.Color = Color.Blue;
shp3.LineFormat.FillFormat.FillType = FillType.Solid;
shp3.LineFormat.FillFormat.SolidFillColor.Color = Color.Blue;
// Set the Join Style
shp1.LineFormat.JoinStyle = LineJoinStyle.Miter;
shp2.LineFormat.JoinStyle = LineJoinStyle.Bevel;
shp3.LineFormat.JoinStyle = LineJoinStyle.Round;
// Add text to each rectangle
((IAutoShape)shp1).TextFrame.Text = "This is Miter Join Style";
((IAutoShape)shp2).TextFrame.Text = "This is Bevel Join Style";
((IAutoShape)shp3).TextFrame.Text = "This is Round Join Style";
//Write the PPTX file to disk
pres.Save(dataDir + "RectShpLnJoin_out.pptx", SaveFormat.Pptx);
}
//ExEnd:FormatJoinStyles
}
}
}
<|start_filename|>Examples/CSharp/Charts/SetDataLabelsPercentageSign.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class SetDataLabelsPercentageSign
{
public static void Run()
{
//ExStart:SetDataLabelsPercentageSign
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create an instance of Presentation class
Presentation presentation = new Presentation();
// Get reference of the slide
ISlide slide = presentation.Slides[0];
// Add PercentsStackedColumn chart on a slide
IChart chart = slide.Shapes.AddChart(ChartType.PercentsStackedColumn, 20, 20, 500, 400);
// Set NumberFormatLinkedToSource to false
chart.Axes.VerticalAxis.IsNumberFormatLinkedToSource = false;
chart.Axes.VerticalAxis.NumberFormat = "0.00%";
chart.ChartData.Series.Clear();
int defaultWorksheetIndex = 0;
// Getting the chart data worksheet
IChartDataWorkbook workbook = chart.ChartData.ChartDataWorkbook;
// Add new series
IChartSeries series = chart.ChartData.Series.Add(workbook.GetCell(defaultWorksheetIndex, 0, 1, "Reds"), chart.Type);
series.DataPoints.AddDataPointForBarSeries(workbook.GetCell(defaultWorksheetIndex, 1, 1, 0.30));
series.DataPoints.AddDataPointForBarSeries(workbook.GetCell(defaultWorksheetIndex, 2, 1, 0.50));
series.DataPoints.AddDataPointForBarSeries(workbook.GetCell(defaultWorksheetIndex, 3, 1, 0.80));
series.DataPoints.AddDataPointForBarSeries(workbook.GetCell(defaultWorksheetIndex, 4, 1, 0.65));
// Setting the fill color of series
series.Format.Fill.FillType = FillType.Solid;
series.Format.Fill.SolidFillColor.Color = Color.Red;
// Setting LabelFormat properties
series.Labels.DefaultDataLabelFormat.ShowValue = true;
series.Labels.DefaultDataLabelFormat.IsNumberFormatLinkedToSource = false;
series.Labels.DefaultDataLabelFormat.NumberFormat = "0.0%";
series.Labels.DefaultDataLabelFormat.TextFormat.PortionFormat.FontHeight = 10;
series.Labels.DefaultDataLabelFormat.TextFormat.PortionFormat.FillFormat.FillType = FillType.Solid;
series.Labels.DefaultDataLabelFormat.TextFormat.PortionFormat.FillFormat.SolidFillColor.Color = Color.White;
series.Labels.DefaultDataLabelFormat.ShowValue = true;
// Add new series
IChartSeries series2 = chart.ChartData.Series.Add(workbook.GetCell(defaultWorksheetIndex, 0, 2, "Blues"), chart.Type);
series2.DataPoints.AddDataPointForBarSeries(workbook.GetCell(defaultWorksheetIndex, 1, 2, 0.70));
series2.DataPoints.AddDataPointForBarSeries(workbook.GetCell(defaultWorksheetIndex, 2, 2, 0.50));
series2.DataPoints.AddDataPointForBarSeries(workbook.GetCell(defaultWorksheetIndex, 3, 2, 0.20));
series2.DataPoints.AddDataPointForBarSeries(workbook.GetCell(defaultWorksheetIndex, 4, 2, 0.35));
// Setting Fill type and color
series2.Format.Fill.FillType = FillType.Solid;
series2.Format.Fill.SolidFillColor.Color = Color.Blue;
series2.Labels.DefaultDataLabelFormat.ShowValue = true;
series2.Labels.DefaultDataLabelFormat.IsNumberFormatLinkedToSource = false;
series2.Labels.DefaultDataLabelFormat.NumberFormat = "0.0%";
series2.Labels.DefaultDataLabelFormat.TextFormat.PortionFormat.FontHeight = 10;
series2.Labels.DefaultDataLabelFormat.TextFormat.PortionFormat.FillFormat.FillType = FillType.Solid;
series2.Labels.DefaultDataLabelFormat.TextFormat.PortionFormat.FillFormat.SolidFillColor.Color = Color.White;
// Write presentation to disk
presentation.Save(dataDir + "SetDataLabelsPercentageSign_out.pptx", SaveFormat.Pptx);
//ExEnd:SetDataLabelsPercentageSign
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Create a New Presentation/Aspose Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
//Create a presentation
Presentation pres = new Presentation();
//Add the title slide
Slide slide = pres.AddTitleSlide();
//Set the title text
((TextHolder)slide.Placeholders[0]).Text = "Slide Title Heading";
//Set the sub title text
((TextHolder)slide.Placeholders[1]).Text = "Slide Title Sub-Heading";
//Write output to disk
pres.Write("outAsposeSlides.ppt");
}
}
}
<|start_filename|>Examples/CSharp/SmartArts/SmartArtNodeLevel.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.SmartArt;
using System;
namespace Aspose.Slides.Examples.CSharp.SmartArts
{
public class SmartArtNodeLevel
{
public static void Run()
{
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_SmartArts();
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Get the titles of all the slides/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
using System;
using System.Collections.Generic;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Get the titles of all the slides.pptx";
foreach (string s in GetSlideTitles(FileName))
Console.WriteLine(s);
Console.ReadKey();
}
// Get a list of the titles of all the slides in the presentation.
public static IList<string> GetSlideTitles(string presentationFile)
{
// Create a new linked list of strings.
List<string> texts = new List<string>();
//Instantiate PresentationEx class that represents PPTX
using (Presentation pres = new Presentation(presentationFile))
{
//Access all the slides
foreach (ISlide sld in pres.Slides)
{
//Iterate through shapes to find the placeholder
foreach (Shape shp in sld.Shapes)
if (shp.Placeholder != null)
{
if (IsTitleShape(shp))
{
//get the text of placeholder
texts.Add(((AutoShape)shp).TextFrame.Text);
}
}
}
}
// Return an array of strings.
return texts;
}
// Determines whether the shape is a title shape.
private static bool IsTitleShape(Shape shape)
{
switch (shape.Placeholder.Type)
{
case PlaceholderType.Title:
case PlaceholderType.CenteredTitle:
return true;
default:
return false;
}
}
}
}
<|start_filename|>Examples/CSharp/Shapes/RemoveShape.cs<|end_filename|>
using System;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class RemoveShape
{
public static void Run()
{
//ExStart:RemoveShape
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create Presentation object
Presentation pres = new Presentation();
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of rectangle type
IShape shp1 = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 40, 150, 50);
IShape shp2 = sld.Shapes.AddAutoShape(ShapeType.Moon, 160, 40, 150, 50);
String alttext = "User Defined";
// Iterate backwards so removing a shape doesn't shift the indices still to be visited
for (int i = sld.Shapes.Count - 1; i >= 0; i--)
{
    AutoShape ashp = (AutoShape)sld.Shapes[i];
if (String.Compare(ashp.AlternativeText, alttext, StringComparison.Ordinal) == 0)
{
sld.Shapes.Remove(ashp);
}
}
// Save presentation to disk
pres.Save(dataDir + "RemoveShape_out.pptx", SaveFormat.Pptx);
//ExEnd:RemoveShape
}
}
}
<|start_filename|>Examples/CSharp/Shapes/AddVideoFrameFromWebSource.cs<|end_filename|>
using System;
using System.Net;
using Aspose.Slides;
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class AddVideoFrameFromWebSource
{
//ExStart:AddVideoFrameFromWebSource
public static void Run()
{
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
using (Presentation pres = new Presentation())
{
AddVideoFromYouTube(pres, "Tj75Arhq5ho");
pres.Save(dataDir + "AddVideoFrameFromWebSource_out.pptx", SaveFormat.Pptx);
}
}
private static void AddVideoFromYouTube(Presentation pres, string videoId)
{
//add videoFrame
IVideoFrame videoFrame = pres.Slides[0].Shapes.AddVideoFrame(10, 10, 427, 240, "https://www.youtube.com/embed/" + videoId);
videoFrame.PlayMode = VideoPlayModePreset.Auto;
//load thumbnail
using (WebClient client = new WebClient())
{
string thumbnailUri = "http://img.youtube.com/vi/" + videoId + "/hqdefault.jpg";
videoFrame.PictureFormat.Picture.Image = pres.Images.AddImage(client.DownloadData(thumbnailUri));
}
}
//ExEnd:AddVideoFrameFromWebSource
}
}
<|start_filename|>Examples/CSharp/Shapes/CreateBoundsShapeThumbnail.cs<|end_filename|>
using System.Drawing;
using System.Drawing.Imaging;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class CreateBoundsShapeThumbnail
{
public static void Run()
{
//ExStart:CreateBoundsShapeThumbnail
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Instantiate a Presentation class that represents the presentation file
using (Presentation presentation = new Presentation(dataDir + "HelloWorld.pptx"))
{
// Create a thumbnail image bounded by the shape's appearance
using (Bitmap bitmap = presentation.Slides[0].Shapes[0].GetThumbnail(ShapeThumbnailBounds.Appearance, 1, 1))
{
// Save the image to disk in PNG format
bitmap.Save(dataDir + "Shape_thumbnail_Bound_Shape_out.png", ImageFormat.Png);
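// A full-slide thumbnail can be produced the same way (sketch):
// using (Bitmap slideBitmap = presentation.Slides[0].GetThumbnail(1, 1))
//     slideBitmap.Save(dataDir + "Slide_thumbnail_out.png", ImageFormat.Png);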
}
}
//ExEnd:CreateBoundsShapeThumbnail
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Adding Picture Frame with Animation/VSTO Slides/ThisAddIn.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using Office = Microsoft.Office.Core;
namespace VSTO_Slides
{
public partial class ThisAddIn
{
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
//Creating empty presentation
PowerPoint.Presentation pres = Globals.ThisAddIn.Application.Presentations.Add(Microsoft.Office.Core.MsoTriState.msoFalse);
//Add a blank slide
PowerPoint.Slide sld = pres.Slides.Add(1, PowerPoint.PpSlideLayout.ppLayoutBlank);
//Add Picture Frame
PowerPoint.Shape PicFrame = sld.Shapes.AddPicture("pic.jpeg",
Microsoft.Office.Core.MsoTriState.msoTriStateMixed,
Microsoft.Office.Core.MsoTriState.msoTriStateMixed, 150, 100, 400, 300);
//Applying animation on picture frame
PicFrame.AnimationSettings.EntryEffect = Microsoft.Office.Interop.PowerPoint.PpEntryEffect.ppEffectBoxIn;
//Saving Presentation
pres.SaveAs("VSTOAnim.ppt", PowerPoint.PpSaveAsFileType.ppSaveAsPresentation,
Microsoft.Office.Core.MsoTriState.msoFalse);
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Examples/CSharp/Slides/CRUD/CloneWithinSamePresentationToEnd.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.CRUD
{
public class CloneWithinSamePresentationToEnd
{
public static void Run()
{
//ExStart:CloneWithinSamePresentationToEnd
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_CRUD();
// Instantiate Presentation class that represents a presentation file
using (Presentation pres = new Presentation(dataDir + "CloneWithinSamePresentationToEnd.pptx"))
{
// Clone the desired slide to the end of the collection of slides in the same presentation
ISlideCollection slds = pres.Slides;
slds.AddClone(pres.Slides[0]);
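// InsertClone places the copy at a specific position instead (sketch):
// slds.InsertClone(1, pres.Slides[0]);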
// Write the modified presentation to disk
pres.Save(dataDir + "Aspose_CloneWithinSamePresentationToEnd_out.pptx", SaveFormat.Pptx);
}
//ExEnd:CloneWithinSamePresentationToEnd
}
}
}
<|start_filename|>Examples/CSharp/Shapes/AddArrowShapedLineToSlide.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using System.Drawing;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class AddArrowShapedLineToSlide
{
public static void Run()
{
//ExStart:AddArrowShapedLineToSlide
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate PresentationEx class that represents the PPTX file
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add an autoshape of type line
IAutoShape shp = sld.Shapes.AddAutoShape(ShapeType.Line, 50, 150, 300, 0);
// Apply some formatting on the line
shp.LineFormat.Style = LineStyle.ThickBetweenThin;
shp.LineFormat.Width = 10;
shp.LineFormat.DashStyle = LineDashStyle.DashDot;
shp.LineFormat.BeginArrowheadLength = LineArrowheadLength.Short;
shp.LineFormat.BeginArrowheadStyle = LineArrowheadStyle.Oval;
shp.LineFormat.EndArrowheadLength = LineArrowheadLength.Long;
shp.LineFormat.EndArrowheadStyle = LineArrowheadStyle.Triangle;
shp.LineFormat.FillFormat.FillType = FillType.Solid;
shp.LineFormat.FillFormat.SolidFillColor.Color = Color.Maroon;
//Write the PPTX to Disk
pres.Save(dataDir + "LineShape2_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AddArrowShapedLineToSlide
}
}
}
<|start_filename|>Examples/CSharp/Charts/SetInvertFillColorChart.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class SetInvertFillColorChart
{
public static void Run()
{
//ExStart:SetInvertFillColorChart
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
Color invertColor = Color.Red;
using (Presentation pres = new Presentation())
{
IChart chart = pres.Slides[0].Shapes.AddChart(ChartType.ClusteredColumn, 100, 100, 400, 300);
IChartDataWorkbook workBook = chart.ChartData.ChartDataWorkbook;
chart.ChartData.Series.Clear();
chart.ChartData.Categories.Clear();
// Adding new series and categories
chart.ChartData.Series.Add(workBook.GetCell(0, 0, 1, "Series 1"), chart.Type);
chart.ChartData.Categories.Add(workBook.GetCell(0, 1, 0, "Category 1"));
chart.ChartData.Categories.Add(workBook.GetCell(0, 2, 0, "Category 2"));
chart.ChartData.Categories.Add(workBook.GetCell(0, 3, 0, "Category 3"));
// Take first chart series and populating series data.
IChartSeries series = chart.ChartData.Series[0];
series.DataPoints.AddDataPointForBarSeries(workBook.GetCell(0, 1, 1, -20));
series.DataPoints.AddDataPointForBarSeries(workBook.GetCell(0, 2, 1, 50));
series.DataPoints.AddDataPointForBarSeries(workBook.GetCell(0, 3, 1, -30));
var seriesColor = series.GetAutomaticSeriesColor();
series.InvertIfNegative = true;
series.Format.Fill.FillType = FillType.Solid;
series.Format.Fill.SolidFillColor.Color = seriesColor;
series.InvertedSolidFillColor.Color = invertedColor;
pres.Save(dataDir + "SetInvertFillColorChart_out.pptx", SaveFormat.Pptx);
}
//ExEnd:SetInvertFillColorChart
}
}
}
<|start_filename|>Examples/CSharp/Shapes/ApplyBevelEffects.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class ApplyBevelEffects
{
public static void Run()
{
//ExStart:ApplyBevelEffects
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create an instance of Presentation class
Presentation pres = new Presentation();
ISlide slide = pres.Slides[0];
// Add a shape on slide
IAutoShape shape = slide.Shapes.AddAutoShape(ShapeType.Ellipse, 30, 30, 100, 100);
shape.FillFormat.FillType = FillType.Solid;
shape.FillFormat.SolidFillColor.Color = Color.Green;
ILineFillFormat format = shape.LineFormat.FillFormat;
format.FillType = FillType.Solid;
format.SolidFillColor.Color = Color.Orange;
shape.LineFormat.Width = 2.0;
// Set ThreeDFormat properties of shape
shape.ThreeDFormat.Depth = 4;
shape.ThreeDFormat.BevelTop.BevelType = BevelPresetType.Circle;
shape.ThreeDFormat.BevelTop.Height = 6;
shape.ThreeDFormat.BevelTop.Width = 6;
shape.ThreeDFormat.Camera.CameraType = CameraPresetType.OrthographicFront;
shape.ThreeDFormat.LightRig.LightType = LightRigPresetType.ThreePt;
shape.ThreeDFormat.LightRig.Direction = LightingDirection.Top;
// Write the presentation as a PPTX file
pres.Save(dataDir + "Bavel_out.pptx", SaveFormat.Pptx);
//ExEnd:ApplyBevelEffects
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Adding Picture Frame with Animation/Aspose Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
//Creating empty presentation
Presentation pres = new Presentation();
//Accessing the First slide
Slide slide = pres.GetSlideByPosition(1);
//Adding the picture object to pictures collection of the presentation
Picture pic = new Picture(pres, "pic.jpeg");
//After the picture object is added, the picture is given a unique picture ID
int picId = pres.Pictures.Add(pic);
//Adding Picture Frame
Shape PicFrame = slide.Shapes.AddPictureFrame(picId, 1450, 1100, 2500, 2200);
//Applying animation on picture frame
PicFrame.AnimationSettings.EntryEffect = ShapeEntryEffect.BoxIn;
//Saving Presentation
pres.Write("AsposeAnim.ppt");
}
}
}
<|start_filename|>Examples/CSharp/Shapes/FormattedEllipse.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
using System.Drawing;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class FormattedEllipse
{
public static void Run()
{
//ExStart:FormattedEllipse
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of ellipse type
IShape shp = sld.Shapes.AddAutoShape(ShapeType.Ellipse, 50, 150, 150, 50);
// Apply some formatting to ellipse shape
shp.FillFormat.FillType = FillType.Solid;
shp.FillFormat.SolidFillColor.Color = Color.Chocolate;
// Apply some formatting to the line of Ellipse
shp.LineFormat.FillFormat.FillType = FillType.Solid;
shp.LineFormat.FillFormat.SolidFillColor.Color = Color.Black;
shp.LineFormat.Width = 5;
//Write the PPTX file to disk
pres.Save(dataDir + "EllipseShp2_out.pptx", SaveFormat.Pptx);
}
//ExEnd:FormattedEllipse
}
}
}
<|start_filename|>Examples/CSharp/Rendering-Printing/SpecificPrinterPrinting.cs<|end_filename|>
using System;
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Rendering.Printing
{
class SpecificPrinterPrinting
{
public static void Run()
{
//ExStart:SpecificPrinterPrinting
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Rendering();
try
{
// Load the presentation
Presentation presentation = new Presentation(dataDir + "Print.ppt");
// Call the print method to print whole presentation to the desired printer
presentation.Print("Please set your printer name here");
}
catch (Exception ex)
{
Console.WriteLine(ex.Message + "\nPlease set printer name as string parameter to the Presentation Print method");
}
//ExEnd:SpecificPrinterPrinting
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Saving/SaveWithPassword.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Saving
{
public class SaveWithPassword
{
public static void Run()
{
//ExStart:SaveWithPassword
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationSaving();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate a Presentation object that represents a PPT file
Presentation pres = new Presentation();
//....do some work here.....
// Setting Password
pres.ProtectionManager.Encrypt("pass");
// Save your presentation to a file
pres.Save(dataDir + "SaveWithPassword_out.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
//ExEnd:SaveWithPassword
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Creating a Table on PowerPoint Slide/Aspose Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
//Create a presentation
Presentation pres = new Presentation();
//Access first slide
Slide sld = pres.GetSlideByPosition(1);
//Add a table
Aspose.Slides.Table tbl = sld.Shapes.AddTable(50, 50, pres.SlideSize.Width - 100, pres.SlideSize.Height - 100, 15, 15);
//Loop through rows
for (int i = 0; i < tbl.RowsNumber; i++)
//Loop through the cells in the row
for (int j = 0; j < tbl.ColumnsNumber; j++)
{
//Get text frame of each cell
TextFrame tf = tbl.GetCell(j, i).TextFrame;
//Add some text
tf.Text = "T" + i.ToString() + j.ToString();
//Set font size of 10
tf.Paragraphs[0].Portions[0].FontHeight = 10;
tf.Paragraphs[0].HasBullet = false;
}
//Write the presentation to the disk
pres.Write("tblSLD.ppt");
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Adding image in table cell/Aspose.Slides/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose.Slides
{
class Program
{
static void Main(string[] args)
{
string FileName = @"E:\Aspose\Aspose Vs VSTO\Aspose.Slides Vs VSTO Presentations v 1.1\Sample Files\Removing Row Or Column in Table.pptx";
string ImageFile = @"E:\Aspose\Aspose Vs VSTO\Aspose.Slides Vs VSTO Presentations v 1.1\Sample Files\AsposeLogo.jpg";
Presentation MyPresentation = new Presentation(FileName);
//Get First Slide
ISlide sld = MyPresentation.Slides[0];
//Creating a Bitmap Image object to hold the image file
System.Drawing.Bitmap image = new Bitmap(ImageFile);
//Create an IPPImage object using the bitmap object
IPPImage imgx1 = MyPresentation.Images.AddImage(image);
foreach (IShape shp in sld.Shapes)
if (shp is ITable)
{
ITable tbl = (ITable)shp;
//Add image to first table cell
tbl[0, 0].FillFormat.FillType = FillType.Picture;
tbl[0, 0].FillFormat.PictureFillFormat.PictureFillMode = PictureFillMode.Stretch;
tbl[0, 0].FillFormat.PictureFillFormat.Picture.Image = imgx1;
}
//Save PPTX to Disk
MyPresentation.Save(FileName, Export.SaveFormat.Pptx);
}
}
}
<|start_filename|>Examples/CSharp/Charts/ExistingChart.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class ExistingChart
{
public static void Run()
{
//ExStart:ExistingChart
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Instantiate Presentation class that represents a PPTX file
Presentation pres = new Presentation(dataDir + "ExistingChart.pptx");
// Access the first slide
ISlide sld = pres.Slides[0];
// Get the chart already present on the first slide
IChart chart = (IChart)sld.Shapes[0];
// Setting the index of chart data sheet
int defaultWorksheetIndex = 0;
// Getting the chart data worksheet
IChartDataWorkbook fact = chart.ChartData.ChartDataWorkbook;
// Changing chart Category Name
fact.GetCell(defaultWorksheetIndex, 1, 0, "Modified Category 1");
fact.GetCell(defaultWorksheetIndex, 2, 0, "Modified Category 2");
// Take first chart series
IChartSeries series = chart.ChartData.Series[0];
// Now updating series data
fact.GetCell(defaultWorksheetIndex, 0, 1, "New_Series1");// Modifying series name
series.DataPoints[0].Value.Data = 90;
series.DataPoints[1].Value.Data = 123;
series.DataPoints[2].Value.Data = 44;
// Take Second chart series
series = chart.ChartData.Series[1];
// Now updating series data
fact.GetCell(defaultWorksheetIndex, 0, 2, "New_Series2");// Modifying series name
series.DataPoints[0].Value.Data = 23;
series.DataPoints[1].Value.Data = 67;
series.DataPoints[2].Value.Data = 99;
// Now, Adding a new series
chart.ChartData.Series.Add(fact.GetCell(defaultWorksheetIndex, 0, 3, "Series 3"), chart.Type);
// Take 3rd chart series
series = chart.ChartData.Series[2];
// Now populating series data
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 1, 3, 20));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 2, 3, 50));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 3, 3, 30));
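// Switch the chart type after the data has been populated; the existing series and categories are preserved.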
chart.Type = ChartType.ClusteredCylinder;
// Save presentation with chart
pres.Save(dataDir + "AsposeChartModified_out.pptx", SaveFormat.Pptx);
//ExEnd:ExistingChart
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Tiff conversion with note/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides.Export;
using Aspose.Slides;
namespace Tiff_conversion_with_note
{
class Program
{
static void Main(string[] args)
{
string MyDir = @"Files\";
//Instantiate a Presentation object that represents a presentation file
Presentation pres = new Presentation(MyDir + "Conversion.pptx");
//Saving the presentation as TIFF together with the slide notes
pres.Save(MyDir + "ConvertedwithNotes.tiff", SaveFormat.TiffNotes);
}
}
}
<|start_filename|>Examples/CSharp/Charts/AutomaticChartSeriescolor.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class AutomaticChartSeriescolor
{
public static void Run()
{
//ExStart:AutomaticChartSeriescolor
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create an instance of Presentation class
Presentation presentation = new Presentation();
// Access first slide
ISlide slide = presentation.Slides[0];
// Add chart with default data
IChart chart = slide.Shapes.AddChart(ChartType.ClusteredColumn, 0, 0, 500, 500);
// Set first series to Show Values
chart.ChartData.Series[0].Labels.DefaultDataLabelFormat.ShowValue = true;
// Setting the index of chart data sheet
int defaultWorksheetIndex = 0;
// Getting the chart data worksheet
IChartDataWorkbook fact = chart.ChartData.ChartDataWorkbook;
// Delete default generated series and categories
chart.ChartData.Series.Clear();
chart.ChartData.Categories.Clear();
// Both collections are now empty.
// Adding new series
chart.ChartData.Series.Add(fact.GetCell(defaultWorksheetIndex, 0, 1, "Series 1"), chart.Type);
chart.ChartData.Series.Add(fact.GetCell(defaultWorksheetIndex, 0, 2, "Series 2"), chart.Type);
// Adding new categories
chart.ChartData.Categories.Add(fact.GetCell(defaultWorksheetIndex, 1, 0, "Caetegoty 1"));
chart.ChartData.Categories.Add(fact.GetCell(defaultWorksheetIndex, 2, 0, "Caetegoty 2"));
chart.ChartData.Categories.Add(fact.GetCell(defaultWorksheetIndex, 3, 0, "Caetegoty 3"));
// Take first chart series
IChartSeries series = chart.ChartData.Series[0];
// Now populating series data
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 1, 1, 20));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 2, 1, 50));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 3, 1, 30));
// Setting automatic fill color for series
series.Format.Fill.FillType = FillType.NotDefined;
// Take second chart series
series = chart.ChartData.Series[1];
// Now populating series data
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 1, 2, 30));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 2, 2, 10));
series.DataPoints.AddDataPointForBarSeries(fact.GetCell(defaultWorksheetIndex, 3, 2, 60));
// Setting fill color for series
series.Format.Fill.FillType = FillType.Solid;
series.Format.Fill.SolidFillColor.Color = Color.Gray;
// Save presentation with chart
presentation.Save(dataDir + "AutomaticColor_out.pptx", SaveFormat.Pptx);
//ExEnd:AutomaticChartSeriescolor
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Rendering Shapes and Slide to Images/Getting Image from Shape on Slides/Program.cs<|end_filename|>
using Aspose.Slides.Pptx;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Getting_Image_from_Shape_on_Slides
{
class Program
{
static void Main(string[] args)
{
string path = @"Files\";
//Accessing the presentation
PresentationEx pres = new PresentationEx(path + "RenderImageFromShape.pptx");
ImageEx img = null;
int slideIndex = 0;
String ImageType = "";
bool ifImageFound = false;
for (int i = 0; i < pres.Slides.Count; i++)
{
slideIndex++;
//Accessing the first slide
SlideEx sl = pres.Slides[i];
System.Drawing.Imaging.ImageFormat Format = System.Drawing.Imaging.ImageFormat.Jpeg;
for (int j = 0; j < sl.Shapes.Count; j++)
{
// Accessing the shape with picture
ShapeEx sh = sl.Shapes[j];
if (sh is AutoShapeEx)
{
AutoShapeEx ashp = (AutoShapeEx)sh;
if (ashp.FillFormat.FillType == FillTypeEx.Picture)
{
img = ashp.FillFormat.PictureFillFormat.Picture.Image;
ImageType = img.ContentType;
ImageType = ImageType.Remove(0, ImageType.IndexOf("/") + 1);
ifImageFound = true;
}
}
else if (sh is PictureFrameEx)
{
PictureFrameEx pf = (PictureFrameEx)sh;
if (pf.FillFormat.FillType == FillTypeEx.Picture)
{
img = pf.PictureFormat.Picture.Image;
ImageType = img.ContentType;
ImageType = ImageType.Remove(0, ImageType.IndexOf("/") + 1);
ifImageFound = true;
}
}
//
//Setting the desired picture format
if (ifImageFound)
{
switch (ImageType)
{
case "jpeg":
Format = System.Drawing.Imaging.ImageFormat.Jpeg;
break;
case "emf":
Format = System.Drawing.Imaging.ImageFormat.Emf;
break;
case "bmp":
Format = System.Drawing.Imaging.ImageFormat.Bmp;
break;
case "png":
Format = System.Drawing.Imaging.ImageFormat.Png;
break;
case "wmf":
Format = System.Drawing.Imaging.ImageFormat.Wmf;
break;
case "gif":
Format = System.Drawing.Imaging.ImageFormat.Gif;
break;
}
//
img.Image.Save(path+"ResultedImage"+"." + ImageType, Format);
}
ifImageFound = false;
}
}
}
}
}
<|start_filename|>Examples/CSharp/Charts/DisplayPercentageAsLabels.cs<|end_filename|>
using System;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class DisplayPercentageAsLabels
{
public static void Run()
{
//ExStart:DisplayPercentageAsLabels
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create an instance of Presentation class
Presentation presentation = new Presentation();
ISlide slide = presentation.Slides[0];
IChart chart = slide.Shapes.AddChart(ChartType.StackedColumn, 20, 20, 400, 400);
IChartSeries series = chart.ChartData.Series[0];
IChartCategory cat;
double[] total_for_Cat = new double[chart.ChartData.Categories.Count];
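// First pass: sum the values of every series for each category so that each label can show its data point's share of the category total.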
for (int k = 0; k < chart.ChartData.Categories.Count; k++)
{
cat = chart.ChartData.Categories[k];
for (int i = 0; i < chart.ChartData.Series.Count; i++)
{
total_for_Cat[k] = total_for_Cat[k] + Convert.ToDouble(chart.ChartData.Series[i].DataPoints[k].Value.Data);
}
}
double dataPointPercent = 0;
for (int x = 0; x < chart.ChartData.Series.Count; x++)
{
series = chart.ChartData.Series[x];
series.Labels.DefaultDataLabelFormat.ShowLegendKey = false;
for (int j = 0; j < series.DataPoints.Count; j++)
{
IDataLabel lbl = series.DataPoints[j].Label;
dataPointPercent = (Convert.ToDouble(series.DataPoints[j].Value.Data) / total_for_Cat[j]) * 100;
IPortion port = new Portion();
port.Text = String.Format("{0:F2} %", dataPointPercent);
port.PortionFormat.FontHeight = 8f;
lbl.TextFrameForOverriding.Text = "";
IParagraph para = lbl.TextFrameForOverriding.Paragraphs[0];
para.Portions.Add(port);
lbl.DataLabelFormat.ShowSeriesName = false;
lbl.DataLabelFormat.ShowPercentage = false;
lbl.DataLabelFormat.ShowLegendKey = false;
lbl.DataLabelFormat.ShowCategoryName = false;
lbl.DataLabelFormat.ShowBubbleSize = false;
}
}
// Save presentation with chart
presentation.Save(dataDir + "DisplayPercentageAsLabels_out.pptx", SaveFormat.Pptx);
//ExEnd:DisplayPercentageAsLabels
}
}
}
<|start_filename|>Examples/CSharp/Shapes/ConnectorLineAngle.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using System;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class ConnectorLineAngle
{
//ExStart:ConnectorLineAngle
public static void Run()
{
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
Presentation pres = new Presentation(dataDir + "ConnectorLineAngle.pptx");
Slide slide = (Slide)pres.Slides[0];
Shape shape;
for (int i = 0; i < slide.Shapes.Count; i++)
{
double dir = 0.0;
shape = (Shape)slide.Shapes[i];
if (shape is AutoShape)
{
AutoShape ashp = (AutoShape)shape;
if (ashp.ShapeType == ShapeType.Line)
{
dir = getDirection(ashp.Width, ashp.Height, Convert.ToBoolean(ashp.Frame.FlipH), Convert.ToBoolean(ashp.Frame.FlipV));
}
}
else if (shape is Connector)
{
Connector ashp = (Connector)shape;
dir = getDirection(ashp.Width, ashp.Height, Convert.ToBoolean(ashp.Frame.FlipH), Convert.ToBoolean(ashp.Frame.FlipV));
}
Console.WriteLine(dir);
}
}
public static double getDirection(float w, float h, bool flipH, bool flipV)
{
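// Returns the angle, in degrees, between the frame's Y-axis vector (0, h) and
// the line's direction vector, negating components where the frame is flipped.
// Math.Atan2 gives each vector's polar angle; the difference is normalized
// into the [0, 360) range.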
float endLineX = w * (flipH ? -1 : 1);
float endLineY = h * (flipV ? -1 : 1);
float endYAxisX = 0;
float endYAxisY = h;
double angle = (Math.Atan2(endYAxisY, endYAxisX) - Math.Atan2(endLineY, endLineX));
if (angle < 0) angle += 2 * Math.PI;
return angle * 180.0 / Math.PI;
}
//ExEnd:ConnectorLineAngle
}
}
<|start_filename|>Examples/CSharp/Presentations/Properties/ModifyBuiltinProperties.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations
{
public class ModifyBuiltinProperties
{
public static void Run()
{
//ExStart:ModifyBuiltinProperties
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationProperties();
// Instantiate the Presentation class that represents the Presentation
Presentation presentation = new Presentation(dataDir + "ModifyBuiltinProperties.pptx");
// Create a reference to IDocumentProperties object associated with Presentation
IDocumentProperties documentProperties = presentation.DocumentProperties;
// Set the builtin properties
documentProperties.Author = "Aspose.Slides for .NET";
documentProperties.Title = "Modifying Presentation Properties";
documentProperties.Subject = "Aspose Subject";
documentProperties.Comments = "Aspose Description";
documentProperties.Manager = "Aspose Manager";
// Save your presentation to a file
presentation.Save(dataDir + "DocumentProperties_out.pptx", SaveFormat.Pptx);
//ExEnd:ModifyBuiltinProperties
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Properties/AccessModifyingProperties.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Properties
{
public class AccessModifyingProperties
{
public static void Run()
{
//ExStart:AccessModifyingProperties
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationProperties();
// Instantiate the Presentation class that represents the PPTX
Presentation presentation = new Presentation(dataDir + "AccessModifyingProperties.pptx");
// Create a reference to the DocumentProperties object associated with the Presentation
IDocumentProperties documentProperties = presentation.DocumentProperties;
// Access and modify custom properties
for (int i = 0; i < documentProperties.CountOfCustomProperties; i++)
{
// Display names and values of custom properties
System.Console.WriteLine("Custom Property Name : " + documentProperties.GetCustomPropertyName(i));
System.Console.WriteLine("Custom Property Value : " + documentProperties[documentProperties.GetCustomPropertyName(i)]);
// Modify values of custom properties
documentProperties[documentProperties.GetCustomPropertyName(i)] = "New Value " + (i + 1);
}
// Save your presentation to a file
presentation.Save(dataDir + "CustomDemoModified_out.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
//ExEnd:AccessModifyingProperties
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Add a comment to a slide/OpenXml Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using System.Linq;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Add a comment to a slide.pptx";
AddCommentToPresentation(FileName,
"MZ", "Zeeshan",
"This is my programmatically added comment.");
}
// Adds a comment to the first slide of the presentation document.
// The presentation document must contain at least one slide.
private static void AddCommentToPresentation(string file, string initials, string name, string text)
{
using (PresentationDocument doc = PresentationDocument.Open(file, true))
{
// Declare a CommentAuthorsPart object.
CommentAuthorsPart authorsPart;
// Verify that there is an existing comment authors part.
if (doc.PresentationPart.CommentAuthorsPart == null)
{
// If not, add a new one.
authorsPart = doc.PresentationPart.AddNewPart<CommentAuthorsPart>();
}
else
{
authorsPart = doc.PresentationPart.CommentAuthorsPart;
}
// Verify that there is a comment author list in the comment authors part.
if (authorsPart.CommentAuthorList == null)
{
// If not, add a new one.
authorsPart.CommentAuthorList = new CommentAuthorList();
}
// Declare a new author ID.
uint authorId = 0;
CommentAuthor author = null;
// If there are existing child elements in the comment authors list...
if (authorsPart.CommentAuthorList.HasChildren)
{
// Verify that the author passed in is on the list.
var authors = authorsPart.CommentAuthorList.Elements<CommentAuthor>().Where(a => a.Name == name && a.Initials == initials);
// If so...
if (authors.Any())
{
// Assign the new comment author the existing author ID.
author = authors.First();
authorId = author.Id;
}
// If not...
if (author == null)
{
// Assign the author passed in a new ID
authorId = authorsPart.CommentAuthorList.Elements<CommentAuthor>().Select(a => a.Id.Value).Max();
}
}
// If there are no existing child elements in the comment authors list.
if (author == null)
{
authorId++;
// Add a new child element(comment author) to the comment author list.
author = authorsPart.CommentAuthorList.AppendChild<CommentAuthor>
(new CommentAuthor()
{
Id = authorId,
Name = name,
Initials = initials,
ColorIndex = 0
});
}
// Get the first slide, using the GetFirstSlide method.
SlidePart slidePart1 = GetFirstSlide(doc);
// Declare a comments part.
SlideCommentsPart commentsPart;
// Verify that there is a comments part in the first slide part.
if (slidePart1.GetPartsOfType<SlideCommentsPart>().Count() == 0)
{
// If not, add a new comments part.
commentsPart = slidePart1.AddNewPart<SlideCommentsPart>();
}
else
{
// Else, use the first comments part in the slide part.
commentsPart = slidePart1.GetPartsOfType<SlideCommentsPart>().First();
}
// If the comment list does not exist.
if (commentsPart.CommentList == null)
{
// Add a new comments list.
commentsPart.CommentList = new CommentList();
}
// Get the new comment ID.
uint commentIdx = author.LastIndex == null ? 1 : author.LastIndex + 1;
author.LastIndex = commentIdx;
// Add a new comment.
Comment comment = commentsPart.CommentList.AppendChild<Comment>(
new Comment()
{
AuthorId = authorId,
Index = commentIdx,
DateTime = DateTime.Now
});
// Add the position child node to the comment element.
comment.Append(
new Position() { X = 100, Y = 200 },
new Text() { Text = text });
// Save the comment authors part.
authorsPart.CommentAuthorList.Save();
// Save the comments part.
commentsPart.CommentList.Save();
}
}
// Get the slide part of the first slide in the presentation document.
private static SlidePart GetFirstSlide(PresentationDocument presentationDocument)
{
// Get relationship ID of the first slide
PresentationPart part = presentationDocument.PresentationPart;
SlideId slideId = part.Presentation.SlideIdList.GetFirstChild<SlideId>();
string relId = slideId.RelationshipId;
// Get the slide part by the relationship ID.
SlidePart slidePart = (SlidePart)part.GetPartById(relId);
return slidePart;
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Conversion PPT to PPTX/PPT to PPTX/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Text;
using Aspose.Slides;
namespace Conversion_from_PPt_to_PPtx_format
{
class Program
{
static void Main(string[] args)
{
string MyDir = @"Files\";
Presentation presentation = new Presentation(MyDir + "Sample.ppt");
presentation.Save(MyDir + "Converted.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Get all the text in a slide/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
using System;
using System.Collections.Generic;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Get all the text in a slide.pptx";
foreach (string s in GetAllTextInSlide(FileName, 0))
Console.WriteLine(s);
Console.ReadKey();
}
// Get all the text in a slide.
public static List<string> GetAllTextInSlide(string presentationFile, int slideIndex)
{
// Create a new list of strings.
List<string> texts = new List<string>();
//Instantiate Presentation class that represents PPTX
using (Presentation pres = new Presentation(presentationFile))
{
//Access the slide
ISlide sld = pres.Slides[slideIndex];
//Iterate through shapes to find the placeholder
foreach (Shape shp in sld.Shapes)
if (shp.Placeholder != null)
{
//get the text of each placeholder
texts.Add(((AutoShape)shp).TextFrame.Text);
}
}
// Return the list of strings.
return texts;
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Count the number of Slides/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
using System;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Count the number of slides.pptx";
Console.WriteLine("Number of slides = {0}",
CountSlides(FileName));
Console.ReadKey();
}
public static int CountSlides(string presentationFile)
{
//Instantiate a Presentation object that represents a PPTX file
using (Presentation pres = new Presentation(presentationFile))
{
return pres.Slides.Count;
}
}
}
}
<|start_filename|>Examples/CSharp/Shapes/AccessingAltTextinGroupshapes.cs<|end_filename|>
using System;
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class AccessingAltTextinGroupshapes
{
public static void Run()
{
//ExStart:AccessingAltTextinGroupshapes
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Instantiate Presentation class that represents PPTX file
Presentation pres = new Presentation(dataDir + "AltText.pptx");
// Get the first slide
ISlide sld = pres.Slides[0];
for (int i = 0; i < sld.Shapes.Count; i++)
{
// Accessing the shape collection of slides
IShape shape = sld.Shapes[i];
if (shape is GroupShape)
{
// Accessing the group shape.
IGroupShape grphShape = (IGroupShape)shape;
for (int j = 0; j < grphShape.Shapes.Count; j++)
{
IShape shape2 = grphShape.Shapes[j];
// Accessing the AltText property
Console.WriteLine(shape2.AlternativeText);
}
}
}
//ExEnd:AccessingAltTextinGroupshapes
}
}
}
<|start_filename|>Plugins/OpenXML/Missing Features/Assemble Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
using Aspose.Slides.Export;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
private static string MyDir = @"..\..\..\Sample Files\";
static void Main(string[] args)
{
AddingSlidetoPresentation();
AccessingSlidesOfPresentation();
RemovingSlides();
ChangingPositionOfSlide();
}
public static void AddingSlidetoPresentation()
{
Presentation pres = new Presentation();
//Access the slide collection of the presentation
ISlideCollection slds = pres.Slides;
for (int i = 0; i < pres.LayoutSlides.Count; i++)
{
//Add an empty slide to the Slides collection
slds.AddEmptySlide(pres.LayoutSlides[i]);
}
//Save the PPTX file to the Disk
pres.Save(MyDir + "Assemble Slides.pptx", SaveFormat.Pptx);
}
public static void AccessingSlidesOfPresentation()
{
//Instantiate a Presentation object that represents a presentation file
Presentation pres = new Presentation(MyDir + "Assemble Slides.pptx");
//Accessing a slide using its slide index
ISlide slide = pres.Slides[0];
}
public static void RemovingSlides()
{
//Instantiate a Presentation object that represents a presentation file
Presentation pres = new Presentation(MyDir + "Assemble Slides.pptx");
//Accessing a slide using its index in the slides collection
ISlide slide = pres.Slides[0];
//Removing a slide using its reference
pres.Slides.Remove(slide);
//Writing the presentation file
pres.Save(MyDir + "Assemble Slides.pptx", SaveFormat.Pptx);
}
public static void ChangingPositionOfSlide()
{
//Instantiate Presentation class to load the source presentation file
using (Presentation pres = new Presentation(MyDir + "Assemble Slides.pptx"))
{
//Get the slide whose position is to be changed
ISlide sld = pres.Slides[0];
//Set the new position for the slide
sld.SlideNumber = 2;
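// (SlideNumber is the 1-based position of the slide, so the first slide moves into second place.)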
//Write the presentation to disk
pres.Save(MyDir + "Assemble Slides.pptx", SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Add Table on Slide/Aspose.Slides/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose.Slides
{
class Program
{
static void Main(string[] args)
{
//Instantiate Presentation class that represents the PPTX
Presentation pres = new Presentation();
//Get the first slide
ISlide sld = pres.Slides[0];
//Define columns with widths and rows with heights
double[] dblCols = { 50, 50, 50 };
double[] dblRows = { 50, 30, 30, 30, 30 };
//Add table shape to slide
ITable tbl = sld.Shapes.AddTable(100, 50, dblCols, dblRows);
//Set border format for each cell
foreach (IRow row in tbl.Rows)
foreach (ICell cell in row)
{
cell.BorderTop.FillFormat.FillType = FillType.Solid;
cell.BorderTop.FillFormat.SolidFillColor.Color = Color.Red;
cell.BorderTop.Width = 5;
cell.BorderBottom.FillFormat.FillType = FillType.Solid;
cell.BorderBottom.FillFormat.SolidFillColor.Color = Color.Red;
cell.BorderBottom.Width = 5;
cell.BorderLeft.FillFormat.FillType = FillType.Solid;
cell.BorderLeft.FillFormat.SolidFillColor.Color = Color.Red;
cell.BorderLeft.Width = 5;
cell.BorderRight.FillFormat.FillType = FillType.Solid;
cell.BorderRight.FillFormat.SolidFillColor.Color = Color.Red;
cell.BorderRight.Width = 5;
}
//Merge cells 1 & 2 of row 1
tbl.MergeCells(tbl[0, 0], tbl[1, 0], false);
//Add text to the merged cell
tbl[0, 0].TextFrame.Text = "Merged Cells";
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Adding Text Dynamically using VSTO and Aspose.Slides/VSTO Slides/ThisAddIn.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using Office = Microsoft.Office.Core;
namespace VSTO_Slides
{
public partial class ThisAddIn
{
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
//Create a presentation
PowerPoint.Presentation pres = Globals.ThisAddIn.Application
.Presentations.Add(Microsoft.Office.Core.MsoTriState.msoFalse);
//Get the blank slide layout
PowerPoint.CustomLayout layout = pres.SlideMaster.
CustomLayouts[7];
//Add a blank slide
PowerPoint.Slide sld = pres.Slides.AddSlide(1, layout);
//Add a text
PowerPoint.Shape shp = sld.Shapes.AddTextbox(Microsoft.Office.Core.MsoTextOrientation.msoTextOrientationHorizontal, 150, 100, 400, 100);
//Set a text
PowerPoint.TextRange txtRange = shp.TextFrame.TextRange;
txtRange.Text = "Text added dynamically";
txtRange.Font.Name = "Arial";
txtRange.Font.Bold = Microsoft.Office.Core.MsoTriState.msoTrue;
txtRange.Font.Size = 32;
//Write the output to disk
pres.SaveAs("outVSTOAddingText.ppt",
PowerPoint.PpSaveAsFileType.ppSaveAsPresentation,
Microsoft.Office.Core.MsoTriState.msoFalse);
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Converting to HTML/Program.cs<|end_filename|>
using Aspose.Slides.Export;
using Aspose.Slides;
namespace Converting_to_HTML
{
class Program
{
static void Main(string[] args)
{
string MyDir = @"Files\";
//Instantiate a Presentation object that represents a presentation file
Presentation pres = new Presentation(MyDir + "Conversion.ppt");
HtmlOptions htmlOpt = new HtmlOptions();
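// CreateDocumentFormatter builds an HtmlFormatter from a CSS string (empty here, so default styles apply) and a flag that controls whether slide titles are rendered.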
htmlOpt.HtmlFormatter = HtmlFormatter.CreateDocumentFormatter("", false);
//Saving the presentation to HTML
pres.Save(MyDir + "Converted.html", Aspose.Slides.Export.SaveFormat.Html, htmlOpt);
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Presentation Locking/Protect Presentation/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Aspose.Slides.Pptx;
using Aspose.Slides;
namespace Protect_Presentation
{
class Program
{
static void Main(string[] args)
{
ApplyingProtection();
RemovingProtection();
}
static void ApplyingProtection()
{
string MyDir = @"Files\";
//Instantiate PresentationEx class that represents a PPTX file
PresentationEx pTemplate = new PresentationEx(MyDir + "Applying Protection.pptx");
//ISlide object for accessing the slides in the presentation
SlideEx slide = pTemplate.Slides[0];
//IShape object for holding temporary shapes
ShapeEx shape;
//Traversing through all the slides in the presentation
for (int slideCount = 0; slideCount < pTemplate.Slides.Count; slideCount++)
{
slide = pTemplate.Slides[slideCount];
//Traversing through all the shapes in the slides
for (int count = 0; count < slide.Shapes.Count; count++)
{
shape = slide.Shapes[count];
//if shape is autoshape
if (shape is AutoShapeEx)
{
//Type casting to Auto shape and getting auto shape lock
AutoShapeEx Ashp = shape as AutoShapeEx;
AutoShapeLockEx AutoShapeLock = Ashp.ShapeLock;
//Applying shape locks
AutoShapeLock.PositionLocked = true;
AutoShapeLock.SelectLocked = true;
AutoShapeLock.SizeLocked = true;
}
//if shape is group shape
else if (shape is GroupShapeEx)
{
//Type casting to group shape and getting group shape lock
GroupShapeEx Group = shape as GroupShapeEx;
GroupShapeLockEx groupShapeLock = Group.ShapeLock;
//Applying shape locks
groupShapeLock.GroupingLocked = true;
groupShapeLock.PositionLocked = true;
groupShapeLock.SelectLocked = true;
groupShapeLock.SizeLocked = true;
}
//if shape is a connector
else if (shape is ConnectorEx)
{
//Type casting to connector shape and getting connector shape lock
ConnectorEx Conn = shape as ConnectorEx;
ConnectorLockEx ConnLock = Conn.ShapeLock;
//Applying shape locks
ConnLock.PositionMove = true;
ConnLock.SelectLocked = true;
ConnLock.SizeLocked = true;
}
//if shape is picture frame
else if (shape is PictureFrameEx)
{
//Type casting to picture frame shape and getting picture frame shape lock
PictureFrameEx Pic = shape as PictureFrameEx;
PictureFrameLockEx PicLock = Pic.ShapeLock;
//Applying shape locks
PicLock.PositionLocked = true;
PicLock.SelectLocked = true;
PicLock.SizeLocked = true;
}
}
}
//Saving the presentation file
pTemplate.Save(MyDir+"ProtectedSample.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
}
static void RemovingProtection()
{
string MyDir = @"Files\";
//Open the desired presentation
PresentationEx pTemplate = new PresentationEx(MyDir + "ProtectedSample.pptx");
//ISlide object for accessing the slides in the presentation
SlideEx slide = pTemplate.Slides[0];
//IShape object for holding temporary shapes
ShapeEx shape;
//Traversing through all the slides in presentation
for (int slideCount = 0; slideCount < pTemplate.Slides.Count; slideCount++)
{
slide = pTemplate.Slides[slideCount];
//Traversing through all the shapes in the slides
for (int count = 0; count < slide.Shapes.Count; count++)
{
shape = slide.Shapes[count];
//if shape is autoshape
if (shape is AutoShapeEx)
{
//Type casting to Auto shape and getting auto shape lock
AutoShapeEx Ashp = shape as AutoShapeEx;
AutoShapeLockEx AutoShapeLock = Ashp.ShapeLock;
//Removing shape locks
AutoShapeLock.PositionLocked = false;
AutoShapeLock.SelectLocked = false;
AutoShapeLock.SizeLocked = false;
}
//if shape is group shape
else if (shape is GroupShapeEx)
{
//Type casting to group shape and getting group shape lock
GroupShapeEx Group = shape as GroupShapeEx;
GroupShapeLockEx groupShapeLock = Group.ShapeLock;
//Removing shape locks
groupShapeLock.GroupingLocked = false;
groupShapeLock.PositionLocked = false;
groupShapeLock.SelectLocked = false;
groupShapeLock.SizeLocked = false;
}
//if shape is Connector shape
else if (shape is ConnectorEx)
{
//Type casting to connector shape and getting connector shape lock
ConnectorEx Conn = shape as ConnectorEx;
ConnectorLockEx ConnLock = Conn.ShapeLock;
//Removing shape locks
ConnLock.PositionMove = false;
ConnLock.SelectLocked = false;
ConnLock.SizeLocked = false;
}
//if shape is picture frame
else if (shape is PictureFrameEx)
{
//Type casting to picture frame shape and getting picture frame shape lock
PictureFrameEx Pic = shape as PictureFrameEx;
PictureFrameLockEx PicLock = Pic.ShapeLock;
//Removing shape locks
PicLock.PositionLocked = false;
PicLock.SelectLocked = false;
PicLock.SizeLocked = false;
}
}
}
//Saving the presentation file
pTemplate.Save(MyDir+"RemoveProtectionSample.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Saving/SaveToStream.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Saving
{
class SaveToStream
{
public static void Run()
{
//ExStart:SaveToStream
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationSaving();
// Instantiate a Presentation object that represents a PPT file
using (Presentation presentation = new Presentation())
{
IAutoShape shape = presentation.Slides[0].Shapes.AddAutoShape(ShapeType.Rectangle, 200, 200, 200, 200);
// Add text to shape
shape.TextFrame.Text = "This demo shows how to Create PowerPoint file and save it to Stream.";
FileStream toStream = new FileStream(dataDir + "Save_As_Stream_out.pptx", FileMode.Create);
presentation.Save(toStream, Aspose.Slides.Export.SaveFormat.Pptx);
toStream.Close();
}
//ExEnd:SaveToStream
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Fill Color of a Shape/OpenXML Presentation/Program.cs<|end_filename|>
using DocumentFormat.OpenXml;
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using Drawing = DocumentFormat.OpenXml.Drawing;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Fill color of a shape.pptx";
SetPPTShapeColor(FileName);
}
// Change the fill color of a shape.
// The test file must have a filled shape as the first shape on the first slide.
public static void SetPPTShapeColor(string docName)
{
using (PresentationDocument ppt = PresentationDocument.Open(docName, true))
{
// Get the relationship ID of the first slide.
PresentationPart part = ppt.PresentationPart;
OpenXmlElementList slideIds = part.Presentation.SlideIdList.ChildElements;
string relId = (slideIds[0] as SlideId).RelationshipId;
// Get the slide part from the relationship ID.
SlidePart slide = (SlidePart)part.GetPartById(relId);
if (slide != null)
{
// Get the shape tree that contains the shape to change.
ShapeTree tree = slide.Slide.CommonSlideData.ShapeTree;
// Get the first shape in the shape tree.
Shape shape = tree.GetFirstChild<Shape>();
if (shape != null)
{
// Get the style of the shape.
ShapeStyle style = shape.ShapeStyle;
// Get the fill reference.
Drawing.FillReference fillRef = style.FillReference;
// Set the fill color to SchemeColor Accent 6;
fillRef.SchemeColor = new Drawing.SchemeColor();
fillRef.SchemeColor.Val = Drawing.SchemeColorValues.Accent6;
// Save the modified slide.
slide.Slide.Save();
}
}
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Create a Chart/VSTO Slides/ThisAddIn.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using Office = Microsoft.Office.Core;
using System.Drawing;
using Microsoft.Office.Core;
namespace VSTO_Slides
{
public partial class ThisAddIn
{
//Global Variables
public static Microsoft.Office.Interop.PowerPoint.Application objPPT;
public static Microsoft.Office.Interop.PowerPoint.Presentation objPres;
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
EnsurePowerPointIsRunning(true, true);
//Instantiate slide object
Microsoft.Office.Interop.PowerPoint.Slide objSlide = null;
//Access the first slide of presentation
objSlide = objPres.Slides[1];
//Select the first slide and set its layout
objSlide.Select();
objSlide.Layout = Microsoft.Office.Interop.PowerPoint.PpSlideLayout.ppLayoutBlank;
//Add a default chart in slide
objSlide.Shapes.AddChart(Microsoft.Office.Core.XlChartType.xl3DColumn, 20F, 30F, 400F, 300F);
//Access the added chart
Microsoft.Office.Interop.PowerPoint.Chart ppChart = objSlide.Shapes[1].Chart;
//Access the chart data
Microsoft.Office.Interop.PowerPoint.ChartData chartData = ppChart.ChartData;
//Create instance to Excel workbook to work with chart data
Microsoft.Office.Interop.Excel.Workbook dataWorkbook = (Microsoft.Office.Interop.Excel.Workbook)chartData.Workbook;
//Accessing the data worksheet for chart
Microsoft.Office.Interop.Excel.Worksheet dataSheet = dataWorkbook.Worksheets[1];
//Setting the range of chart
Microsoft.Office.Interop.Excel.Range tRange = dataSheet.Cells.get_Range("A1", "B5");
//Applying the set range on chart data table
Microsoft.Office.Interop.Excel.ListObject tbl1 = dataSheet.ListObjects["Table1"];
tbl1.Resize(tRange);
//Setting values for categories and respective series data
((Microsoft.Office.Interop.Excel.Range)(dataSheet.Cells.get_Range("A2"))).FormulaR1C1 = "Bikes";
((Microsoft.Office.Interop.Excel.Range)(dataSheet.Cells.get_Range("A3"))).FormulaR1C1 = "Accessories";
((Microsoft.Office.Interop.Excel.Range)(dataSheet.Cells.get_Range("A4"))).FormulaR1C1 = "Repairs";
((Microsoft.Office.Interop.Excel.Range)(dataSheet.Cells.get_Range("A5"))).FormulaR1C1 = "Clothing";
((Microsoft.Office.Interop.Excel.Range)(dataSheet.Cells.get_Range("B2"))).FormulaR1C1 = "1000";
((Microsoft.Office.Interop.Excel.Range)(dataSheet.Cells.get_Range("B3"))).FormulaR1C1 = "2500";
((Microsoft.Office.Interop.Excel.Range)(dataSheet.Cells.get_Range("B4"))).FormulaR1C1 = "4000";
((Microsoft.Office.Interop.Excel.Range)(dataSheet.Cells.get_Range("B5"))).FormulaR1C1 = "3000";
//Setting chart title
ppChart.ChartTitle.Font.Italic = true;
ppChart.ChartTitle.Text = "2007 Sales";
ppChart.ChartTitle.Font.Size = 18;
ppChart.ChartTitle.Font.Color = Color.Black.ToArgb();
ppChart.ChartTitle.Format.Line.Visible = Microsoft.Office.Core.MsoTriState.msoTrue;
ppChart.ChartTitle.Format.Line.ForeColor.RGB = Color.Black.ToArgb();
//Accessing Chart value axis
Microsoft.Office.Interop.PowerPoint.Axis valaxis = ppChart.Axes(Microsoft.Office.Interop.PowerPoint.XlAxisType.xlValue, Microsoft.Office.Interop.PowerPoint.XlAxisGroup.xlPrimary);
//Setting values axis units
valaxis.MajorUnit = 2000.0F;
valaxis.MinorUnit = 1000.0F;
valaxis.MinimumScale = 0.0F;
valaxis.MaximumScale = 4000.0F;
//Accessing Chart Depth axis
Microsoft.Office.Interop.PowerPoint.Axis Depthaxis = ppChart.Axes(Microsoft.Office.Interop.PowerPoint.XlAxisType.xlSeriesAxis, Microsoft.Office.Interop.PowerPoint.XlAxisGroup.xlPrimary);
Depthaxis.Delete();
//Setting chart rotation
ppChart.Rotation = 20; //Y-Value
ppChart.Elevation = 15; //X-Value
ppChart.RightAngleAxes = false;
// Save the presentation as a PPTX
objPres.SaveAs("VSTOSampleChart", Microsoft.Office.Interop.PowerPoint.PpSaveAsFileType.ppSaveAsDefault, MsoTriState.msoTrue);
//Close Workbook and presentation
dataWorkbook.Application.Quit();
objPres.Application.Quit();
}
//Supplementary methods
public static void StartPowerPoint()
{
objPPT = new Microsoft.Office.Interop.PowerPoint.Application();
objPPT.Visible = MsoTriState.msoTrue;
// objPPT.WindowState = PowerPoint.PpWindowState.ppWindowMaximized
}
public static void EnsurePowerPointIsRunning(bool blnAddPresentation)
{
EnsurePowerPointIsRunning(blnAddPresentation, false);
}
public static void EnsurePowerPointIsRunning()
{
EnsurePowerPointIsRunning(false, false);
}
public static void EnsurePowerPointIsRunning(bool blnAddPresentation, bool blnAddSlide)
{
string strName = null;
//
//Try accessing the name property. If it causes an exception then
//start a new instance of PowerPoint
try
{
strName = objPPT.Name;
}
catch (Exception)
{
StartPowerPoint();
}
//
//blnAddPresentation is used to ensure there is a presentation loaded
if (blnAddPresentation == true)
{
try
{
strName = objPres.Name;
}
catch (Exception)
{
objPres = objPPT.Presentations.Add(MsoTriState.msoTrue);
}
}
//
//BlnAddSlide is used to ensure there is at least one slide in the
//presentation
if (blnAddSlide)
{
try
{
strName = objPres.Slides[1].Name;
}
catch (Exception)
{
Microsoft.Office.Interop.PowerPoint.Slide objSlide = null;
Microsoft.Office.Interop.PowerPoint.CustomLayout objCustomLayout = null;
objCustomLayout = objPres.SlideMaster.CustomLayouts[1];
objSlide = objPres.Slides.AddSlide(1, objCustomLayout);
objSlide.Layout = Microsoft.Office.Interop.PowerPoint.PpSlideLayout.ppLayoutText;
objCustomLayout = null;
objSlide = null;
}
}
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Examples/CSharp/Shapes/SimpleRectangle.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class SimpleRectangle
{
public static void Run()
{
//ExStart:SimpleRectangle
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of rectangle type
sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 150, 150, 50);
//Write the PPTX file to disk
pres.Save(dataDir+ "RectShp1_out.pptx", SaveFormat.Pptx);
}
//ExEnd:SimpleRectangle
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Conversion/ConvertWithoutXpsOptions.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses the Automatic Package Restore feature of NuGet to resolve the Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Conversion
{
public class ConvertWithoutXpsOptions
{
public static void Run()
{
//ExStart:ConvertWithoutXpsOptions
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Conversion();
// Instantiate a Presentation object that represents a presentation file
using (Presentation pres = new Presentation(dataDir + "Convert_XPS.pptx"))
{
// Saving the presentation to XPS document
pres.Save(dataDir + "XPS_Output_Without_XPSOption_out.xps", SaveFormat.Xps);
}
//ExEnd:ConvertWithoutXpsOptions
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Creating Slide SVG Image/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using System.IO;
using Aspose.Slides;
namespace Creating_Slide_SVG_Image
{
class Program
{
static void Main(string[] args)
{
//Instantiate a Presentation class that represents the presentation file
string MyDir = @"Files\";
using (Presentation pres = new Presentation(MyDir + "Slides Test Presentation.pptx"))
{
//Access the second slide
ISlide sld = pres.Slides[1];
//Create a memory stream object
MemoryStream SvgStream = new MemoryStream();
//Generate SVG image of slide and save in memory stream
sld.WriteAsSvg(SvgStream);
SvgStream.Position = 0;
//Save memory stream to file
using (Stream fileStream = System.IO.File.OpenWrite(MyDir + "PresentationTemplate.svg"))
{
byte[] buffer = new byte[8 * 1024];
int len;
while ((len = SvgStream.Read(buffer, 0, buffer.Length)) > 0)
{
fileStream.Write(buffer, 0, len);
}
}
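// Note: on .NET 4 and later, SvgStream.CopyTo(fileStream) is a one-line
// equivalent of the manual buffer loop above.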
SvgStream.Close();
}
}
}
}
<|start_filename|>Examples/CSharp/Slides/CRUD/AddSlides.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.CRUD
{
public class AddSlides
{
public static void Run()
{
//ExStart:AddSlides
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_CRUD();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the presentation file
using (Presentation pres = new Presentation())
{
// Instantiate SlideCollection class
ISlideCollection slds = pres.Slides;
for (int i = 0; i < pres.LayoutSlides.Count; i++)
{
// Add an empty slide to the Slides collection
slds.AddEmptySlide(pres.LayoutSlides[i]);
}
// Save the PPTX file to the Disk
pres.Save(dataDir + "EmptySlide_out.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
}
//ExEnd:AddSlides
}
}
}
<|start_filename|>Examples/CSharp/ActiveX/LinkingVideoActiveXControl.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.ActiveX
{
public class LinkingVideoActiveXControl
{
public static void Run()
{
//ExStart:LinkingVideoActiveXControl
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_ActiveX();
string dataVideo = RunExamples.GetDataDir_Video();
// Instantiate Presentation class that represents PPTX file
Presentation presentation = new Presentation(dataDir + "template.pptx");
// Create empty presentation instance
Presentation newPresentation = new Presentation();
// Remove default slide
newPresentation.Slides.RemoveAt(0);
// Clone slide with Media Player ActiveX Control
newPresentation.Slides.InsertClone(0, presentation.Slides[0]);
// Access the Media Player ActiveX control and set the video path
newPresentation.Slides[0].Controls[0].Properties["URL"] = dataVideo + "Wildlife.mp4";
// Save the Presentation
newPresentation.Save(dataDir + "LinkingVideoActiveXControl_out.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
//ExEnd:LinkingVideoActiveXControl
}
}
}
<|start_filename|>Examples/CSharp/Slides/Background/SetImageAsBackground.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.Background
{
public class SetImageAsBackground
{
public static void Run()
{
//ExStart:SetImageAsBackground
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_Background();
// Instantiate the Presentation class that represents the presentation file
using (Presentation pres = new Presentation(dataDir + "SetImageAsBackground.pptx"))
{
// Set the background with Image
pres.Slides[0].Background.Type = BackgroundType.OwnBackground;
pres.Slides[0].Background.FillFormat.FillType = FillType.Picture;
pres.Slides[0].Background.FillFormat.PictureFillFormat.PictureFillMode = PictureFillMode.Stretch;
// Set the picture
System.Drawing.Image img = (System.Drawing.Image)new Bitmap(dataDir + "Tulips.jpg");
// Add image to presentation's images collection
IPPImage imgx = pres.Images.AddImage(img);
pres.Slides[0].Background.FillFormat.PictureFillFormat.Picture.Image = imgx;
// Write the presentation to disk
pres.Save(dataDir + "ContentBG_Img_out.pptx", SaveFormat.Pptx);
}
//ExEnd:SetImageAsBackground
}
}
}
<|start_filename|>Examples/CSharp/Charts/ChartTrendLines.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using System.Drawing;
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class ChartTrendLines
{
public static void Run()
{
//ExStart:ChartTrendLines
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Creating empty presentation
Presentation pres = new Presentation();
// Creating a clustered column chart
IChart chart = pres.Slides[0].Shapes.AddChart(ChartType.ClusteredColumn, 20, 20, 500, 400);
// Adding Exponential trend line for chart series 1
ITrendline trendLineExp = chart.ChartData.Series[0].TrendLines.Add(TrendlineType.Exponential);
trendLineExp.DisplayEquation = false;
trendLineExp.DisplayRSquaredValue = false;
// Adding Linear trend line for chart series 1
ITrendline trendLineLin = chart.ChartData.Series[0].TrendLines.Add(TrendlineType.Linear);
trendLineLin.TrendlineType = TrendlineType.Linear;
trendLineLin.Format.Line.FillFormat.FillType = FillType.Solid;
trendLineLin.Format.Line.FillFormat.SolidFillColor.Color = Color.Red;
// Adding Logarithmic trend line for chart series 2
ITrendline trendLineLog = chart.ChartData.Series[1].TrendLines.Add(TrendlineType.Logarithmic);
trendLineLog.TrendlineType = TrendlineType.Logarithmic;
trendLineLog.AddTextFrameForOverriding("New log trend line");
// Adding MovingAverage trend line for chart series 2
ITrendline trendLineMovAvg = chart.ChartData.Series[1].TrendLines.Add(TrendlineType.MovingAverage);
trendLineMovAvg.TrendlineType = TrendlineType.MovingAverage;
trendLineMovAvg.Period = 3;
trendLineMovAvg.TrendlineName = "New TrendLine Name";
// Adding Polynomial trend line for chart series 3
ITrendline trendLinePol = chart.ChartData.Series[2].TrendLines.Add(TrendlineType.Polynomial);
trendLinePol.TrendlineType = TrendlineType.Polynomial;
trendLinePol.Forward = 1;
trendLinePol.Order = 3;
// Adding Power trend line for chart series 2
ITrendline trendLinePower = chart.ChartData.Series[1].TrendLines.Add(TrendlineType.Power);
trendLinePower.TrendlineType = TrendlineType.Power;
trendLinePower.Backward = 1;
// Saving presentation
pres.Save(dataDir + "ChartTrendLines_out.pptx", SaveFormat.Pptx);
//ExEnd:ChartTrendLines
}
}
}
<|start_filename|>Examples/CSharp/Shapes/SimpleEllipse.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class SimpleEllipse
{
public static void Run()
{
//ExStart:SimpleEllipse
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of ellipse type
sld.Shapes.AddAutoShape(ShapeType.Ellipse, 50, 150, 150, 50);
//Write the PPTX file to disk
pres.Save(dataDir + "EllipseShp1_out.pptx", SaveFormat.Pptx);
}
//ExEnd:SimpleEllipse
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Apply Theme to Presentation/OpenXML Presentation/Program.cs<|end_filename|>
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Apply Theme to Presentation.pptx";
string ThemeFileName = FilePath + "Theme.pptx";
ApplyThemeToPresentation(FileName, ThemeFileName);
}
// Apply a new theme to the presentation.
public static void ApplyThemeToPresentation(string presentationFile, string themePresentation)
{
using (PresentationDocument themeDocument = PresentationDocument.Open(themePresentation, false))
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, true))
{
ApplyThemeToPresentation(presentationDocument, themeDocument);
}
}
// Apply a new theme to the presentation.
public static void ApplyThemeToPresentation(PresentationDocument presentationDocument, PresentationDocument themeDocument)
{
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
if (themeDocument == null)
{
throw new ArgumentNullException("themeDocument");
}
// Get the presentation part of the presentation document.
PresentationPart presentationPart = presentationDocument.PresentationPart;
// Get the existing slide master part.
SlideMasterPart slideMasterPart = presentationPart.SlideMasterParts.ElementAt(0);
string relationshipId = presentationPart.GetIdOfPart(slideMasterPart);
// Get the new slide master part.
SlideMasterPart newSlideMasterPart = themeDocument.PresentationPart.SlideMasterParts.ElementAt(0);
// Remove the existing theme part.
presentationPart.DeletePart(presentationPart.ThemePart);
// Remove the old slide master part.
presentationPart.DeletePart(slideMasterPart);
// Import the new slide master part, and reuse the old relationship ID.
newSlideMasterPart = presentationPart.AddPart(newSlideMasterPart, relationshipId);
// Change to the new theme part.
presentationPart.AddPart(newSlideMasterPart.ThemePart);
Dictionary<string, SlideLayoutPart> newSlideLayouts = new Dictionary<string, SlideLayoutPart>();
foreach (var slideLayoutPart in newSlideMasterPart.SlideLayoutParts)
{
newSlideLayouts.Add(GetSlideLayoutType(slideLayoutPart), slideLayoutPart);
}
string layoutType = null;
SlideLayoutPart newLayoutPart = null;
// Insert the code for the layout for this example.
string defaultLayoutType = "Title and Content";
// Remove the slide layout relationship on all slides.
foreach (var slidePart in presentationPart.SlideParts)
{
layoutType = null;
if (slidePart.SlideLayoutPart != null)
{
// Determine the slide layout type for each slide.
layoutType = GetSlideLayoutType(slidePart.SlideLayoutPart);
// Delete the old layout part.
slidePart.DeletePart(slidePart.SlideLayoutPart);
}
if (layoutType != null && newSlideLayouts.TryGetValue(layoutType, out newLayoutPart))
{
// Apply the new layout part.
slidePart.AddPart(newLayoutPart);
}
else
{
newLayoutPart = newSlideLayouts[defaultLayoutType];
// Apply the new default layout part.
slidePart.AddPart(newLayoutPart);
}
}
}
// Get the slide layout type.
public static string GetSlideLayoutType(SlideLayoutPart slideLayoutPart)
{
CommonSlideData slideData = slideLayoutPart.SlideLayout.CommonSlideData;
// Remarks: If this is used in production code, check for a null reference.
return slideData.Name;
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Setting Background color of Master Slide/VSTO PowerPoint/ThisAddIn.cs<|end_filename|>
using Office = Microsoft.Office.Core;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
namespace VSTO_PowerPoint
{
public partial class ThisAddIn
{
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
string mypath = "";
PowerPoint.Presentation presentation =
Globals.ThisAddIn.Application.Presentations.Open(mypath + "Setting Background Color of Master Slide.ppt", Office.MsoTriState.msoFalse, Office.MsoTriState.msoFalse, Office.MsoTriState.msoTrue);
presentation.SlideMaster.Background.Fill.ForeColor.RGB = -654262273;
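// The RGB property takes a single packed integer (blue-green-red byte
// order, as used by OLE_COLOR); System.Drawing.ColorTranslator.ToOle can
// compute such a value from a managed Color if a named color is preferred.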
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Examples/CSharp/Shapes/AddPlainLineToSlide.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class AddPlainLineToSlide
{
public static void Run()
{
//ExStart:AddPlainLineToSlide
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX file
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add an autoshape of type line
sld.Shapes.AddAutoShape(ShapeType.Line, 50, 150, 300, 0);
//Write the PPTX to Disk
pres.Save(dataDir + "LineShape1_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AddPlainLineToSlide
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Delete a slide/OpenXML Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Delete a slide.pptx";
DeleteSlide(FileName, 1);
}
// Get the presentation object and pass it to the next DeleteSlide method.
public static void DeleteSlide(string presentationFile, int slideIndex)
{
// Open the source document as read/write.
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, true))
{
// Pass the source document and the index of the slide to be deleted to the next DeleteSlide method.
DeleteSlide(presentationDocument, slideIndex);
}
}
// Delete the specified slide from the presentation.
public static void DeleteSlide(PresentationDocument presentationDocument, int slideIndex)
{
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
// Use the CountSlides sample to get the number of slides in the presentation.
int slidesCount = CountSlides(presentationDocument);
if (slideIndex < 0 || slideIndex >= slidesCount)
{
throw new ArgumentOutOfRangeException("slideIndex");
}
// Get the presentation part from the presentation document.
PresentationPart presentationPart = presentationDocument.PresentationPart;
// Get the presentation from the presentation part.
Presentation presentation = presentationPart.Presentation;
// Get the list of slide IDs in the presentation.
SlideIdList slideIdList = presentation.SlideIdList;
// Get the slide ID of the specified slide
SlideId slideId = slideIdList.ChildElements[slideIndex] as SlideId;
// Get the relationship ID of the slide.
string slideRelId = slideId.RelationshipId;
// Remove the slide from the slide list.
slideIdList.RemoveChild(slideId);
//
// Remove references to the slide from all custom shows.
if (presentation.CustomShowList != null)
{
// Iterate through the list of custom shows.
foreach (var customShow in presentation.CustomShowList.Elements<CustomShow>())
{
if (customShow.SlideList != null)
{
// Declare a link list of slide list entries.
LinkedList<SlideListEntry> slideListEntries = new LinkedList<SlideListEntry>();
foreach (SlideListEntry slideListEntry in customShow.SlideList.Elements())
{
// Find the slide reference to remove from the custom show.
if (slideListEntry.Id != null && slideListEntry.Id == slideRelId)
{
slideListEntries.AddLast(slideListEntry);
}
}
// Remove all references to the slide from the custom show.
foreach (SlideListEntry slideListEntry in slideListEntries)
{
customShow.SlideList.RemoveChild(slideListEntry);
}
}
}
}
// Save the modified presentation.
presentation.Save();
// Get the slide part for the specified slide.
SlidePart slidePart = presentationPart.GetPartById(slideRelId) as SlidePart;
// Remove the slide part.
presentationPart.DeletePart(slidePart);
}
// Get the presentation object and pass it to the next CountSlides method.
public static int CountSlides(string presentationFile)
{
// Open the presentation as read-only.
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, false))
{
// Pass the presentation to the next CountSlide method
// and return the slide count.
return CountSlides(presentationDocument);
}
}
// Count the slides in the presentation.
public static int CountSlides(PresentationDocument presentationDocument)
{
// Check for a null document object.
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
int slidesCount = 0;
// Get the presentation part of document.
PresentationPart presentationPart = presentationDocument.PresentationPart;
// Get the slide count from the SlideParts.
if (presentationPart != null)
{
slidesCount = presentationPart.SlideParts.Count();
}
// Return the slide count to the previous method.
return slidesCount;
}
}
}
<|start_filename|>Examples/CSharp/Shapes/FillShapesPattern.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
using System.Drawing;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class FillShapesPattern
{
public static void Run()
{
//ExStart:FillShapesPattern
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of rectangle type
IShape shp = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 150, 75, 150);
// Set the fill type to Pattern
shp.FillFormat.FillType = FillType.Pattern;
// Set the pattern style
shp.FillFormat.PatternFormat.PatternStyle = PatternStyle.Trellis;
// Set the pattern back and fore colors
shp.FillFormat.PatternFormat.BackColor.Color = Color.LightGray;
shp.FillFormat.PatternFormat.ForeColor.Color = Color.Yellow;
//Write the PPTX file to disk
pres.Save(dataDir + "RectShpPatt_out.pptx", SaveFormat.Pptx);
}
//ExEnd:FillShapesPattern
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Get all the text in all slides/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Get all the text in a slide.pptx";
int numberOfSlides = CountSlides(FileName);
System.Console.WriteLine("Number of slides = {0}", numberOfSlides);
string slideText;
for (int i = 0; i < numberOfSlides; i++)
{
slideText = GetSlideText(FileName, i);
System.Console.WriteLine("Slide #{0} contains: {1}", i + 1, slideText);
}
System.Console.ReadKey();
}
public static int CountSlides(string presentationFile)
{
//Instantiate Presentation class that represents PPTX
using (Presentation pres = new Presentation(presentationFile))
{
return pres.Slides.Count;
}
}
public static string GetSlideText(string docName, int index)
{
string sldText = "";
//Instantiate Presentation class that represents PPTX
using (Presentation pres = new Presentation(docName))
{
//Access the slide
ISlide sld = pres.Slides[index];
//Iterate through shapes to find the placeholder
foreach (Shape shp in sld.Shapes)
if (shp.Placeholder != null)
{
//get the text of each placeholder
sldText += ((AutoShape)shp).TextFrame.Text;
}
}
return sldText;
}
}
}
<|start_filename|>Plugins/OpenXML/Missing Features/Export media files into html/Program.cs<|end_filename|>
using Aspose.Slides;
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\Sample Files\";
string srcFileName = FilePath + "Conversion.pptx";
string destFileName = "video.html";
//Loading a presentation
using (Presentation pres = new Presentation(srcFileName))
{
const string baseUri = "http://www.example.com/";
VideoPlayerHtmlController controller = new VideoPlayerHtmlController(path: FilePath, fileName: destFileName, baseUri: baseUri);
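// VideoPlayerHtmlController is designed to extract the presentation's media
// files into the given path and reference them through baseUri in the
// generated markup.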
//Setting HTML options
HtmlOptions htmlOptions = new HtmlOptions(controller);
SVGOptions svgOptions = new SVGOptions(controller);
htmlOptions.HtmlFormatter = HtmlFormatter.CreateCustomFormatter(controller);
htmlOptions.SlideImageFormat = SlideImageFormat.Svg(svgOptions);
//Saving the file
pres.Save(destFileName, SaveFormat.Html, htmlOptions);
}
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Delete all the comments by an author/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
using System;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Delete all the comments by an author.pptx";
string author = "MZ";
DeleteCommentsByAuthorInPresentation(FileName, author);
}
// Remove all the comments in the slides by a certain author.
public static void DeleteCommentsByAuthorInPresentation(string fileName, string author)
{
if (String.IsNullOrEmpty(fileName) || String.IsNullOrEmpty(author))
throw new ArgumentException("File name or author name is null or empty.");
//Instantiate a Presentation object that represents a PPTX file
using (Presentation pres = new Presentation(fileName))
{
ICommentAuthor[] authors = pres.CommentAuthors.FindByName(author);
// FindByName returns an empty array when no comments by this author exist
if (authors.Length == 0)
return;
ICommentAuthor thisAuthor = authors[0];
for (int i = thisAuthor.Comments.Count - 1; i >= 0;i-- )
{
thisAuthor.Comments.RemoveAt(i);
}
pres.Save(fileName, Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Export media files into html/Program.cs<|end_filename|>
using Aspose.Slides;
using Aspose.Slides.Export;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Export_media_files_into_html
{
class Program
{
static void Main(string[] args)
{
//Loading a presentation
using (Presentation pres = new Presentation("example.pptx"))
{
const string path = "path";
const string fileName = "video.html";
const string baseUri = "http://www.example.com/";
VideoPlayerHtmlController controller = new VideoPlayerHtmlController(path: path, fileName: fileName, baseUri: baseUri);
//Setting HTML options
HtmlOptions htmlOptions = new HtmlOptions(controller);
SVGOptions svgOptions = new SVGOptions(controller);
htmlOptions.HtmlFormatter = HtmlFormatter.CreateCustomFormatter(controller);
htmlOptions.SlideImageFormat = SlideImageFormat.Svg(svgOptions);
//Saving the file
pres.Save(path + fileName, SaveFormat.Html, htmlOptions);
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Adding Layout Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using Aspose.Slides.Export;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Adding_Layout_Slides
{
class Program
{
static void Main(string[] args)
{
//Instantiate Presentation class that represents the presentation file
using (Presentation p = new Presentation("Test.pptx"))
{
// Try to search by layout slide type
IMasterLayoutSlideCollection layoutSlides = p.Masters[0].LayoutSlides;
ILayoutSlide layoutSlide =
layoutSlides.GetByType(SlideLayoutType.TitleAndObject) ??
layoutSlides.GetByType(SlideLayoutType.Title);
if (layoutSlide == null)
{
// Handle the case where a presentation doesn't contain some layout type.
// The Technographics.pptx presentation, for example, only contains Blank
// and Custom layout types, but layout slides with Custom types have
// distinct slide names, like "Title", "Title and Content", etc., and those
// names can be used for layout slide selection.
// The set of placeholder shape types can be used as well; for example,
// a Title slide should have only a Title placeholder type, etc.
foreach (ILayoutSlide titleAndObjectLayoutSlide in layoutSlides)
{
if (titleAndObjectLayoutSlide.Name == "Title and Object")
{
layoutSlide = titleAndObjectLayoutSlide;
break;
}
}
if (layoutSlide == null)
{
foreach (ILayoutSlide titleLayoutSlide in layoutSlides)
{
if (titleLayoutSlide.Name == "Title")
{
layoutSlide = titleLayoutSlide;
break;
}
}
if (layoutSlide == null)
{
layoutSlide = layoutSlides.GetByType(SlideLayoutType.Blank);
if (layoutSlide == null)
{
layoutSlide = layoutSlides.Add(SlideLayoutType.TitleAndObject, "Title and Object");
}
}
}
}
//Adding empty slide with added layout slide
p.Slides.InsertEmptySlide(0, layoutSlide);
//Save presentation
p.Save("Output.pptx", SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Examples/CSharp/Shapes/FillShapesPicture.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
using System.Drawing;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class FillShapesPicture
{
public static void Run()
{
//ExStart:FillShapesPicture
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of rectangle type
IShape shp = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 150, 75, 150);
// Set the fill type to Picture
shp.FillFormat.FillType = FillType.Picture;
// Set the picture fill mode
shp.FillFormat.PictureFillFormat.PictureFillMode = PictureFillMode.Tile;
// Set the picture
System.Drawing.Image img = (System.Drawing.Image)new Bitmap(dataDir + "Tulips.jpg");
IPPImage imgx = pres.Images.AddImage(img);
shp.FillFormat.PictureFillFormat.Picture.Image = imgx;
//Write the PPTX file to disk
pres.Save(dataDir + "RectShpPic_out.pptx", SaveFormat.Pptx);
//ExEnd:FillShapesPicture
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/User Defined Thumbnail/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using System.Drawing;
using Aspose.Slides;
namespace User_Defined_Thumbnail
{
class Program
{
static void Main(string[] args)
{
string MyDir = @"Files\";
//Instantiate a Presentation class that represents the presentation file
using (Presentation pres = new Presentation(MyDir + "TestPresentation.pptx"))
{
//Access the first slide
ISlide sld = pres.Slides[0];
//User defined dimension
int desiredX = 1200;
int desiredY = 800;
//Getting scaled value of X and Y
float ScaleX = (float)(1.0 / pres.SlideSize.Size.Width) * desiredX;
float ScaleY = (float)(1.0 / pres.SlideSize.Size.Height) * desiredY;
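// GetThumbnail multiplies the slide dimensions by these factors, so the
// resulting bitmap comes out at desiredX x desiredY pixels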
//Create a full scale image
Bitmap bmp = sld.GetThumbnail(ScaleX, ScaleY);
//Save the image to disk in JPEG format
bmp.Save(MyDir + "Thumbnail2.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
}
}
}
}
<|start_filename|>Plugins/OpenXML/Missing Features/Find and Replace/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
using Aspose.Slides.Util;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
findReplaceText("This", "Replaced Text");
}
public static void findReplaceText(string strToFind, string strToReplaceWith)
{
string FilePath = @"..\..\..\Sample Files\";
//Open the presentation
Presentation pres = new Presentation(FilePath + "Find and Replace.pptx");
//Get all text boxes in the presentation
ITextFrame[] tb = SlideUtil.GetAllTextBoxes(pres.Slides[0]);
for (int i = 0; i < tb.Length; i++)
foreach (Paragraph para in tb[i].Paragraphs)
foreach (Portion port in para.Portions)
//Find text to be replaced
if (port.Text.Contains(strToFind))
//Replace existing text with the new text
{
string str = port.Text;
int idx = str.IndexOf(strToFind);
string strStartText = str.Substring(0, idx);
string strEndText = str.Substring(idx + strToFind.Length);
port.Text = strStartText + strToReplaceWith + strEndText;
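// Note: only the first occurrence within each portion is replaced here;
// if repeated matches are possible, port.Text.Replace(strToFind,
// strToReplaceWith) would handle them all without the index bookkeeping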
}
pres.Save(FilePath + "Find and Replace.pptx",Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Setting Background color of Master Slide/Aspose Slides/Program.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Export;
using Aspose.Slides.Pptx;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
//Instantiate the Presentation class that represents the presentation file
string mypath = "";
using (PresentationEx pres = new PresentationEx())
{
//Set the background color of the Master ISlide to Forest Green
pres.Masters[0].Background.Type = BackgroundTypeEx.OwnBackground;
pres.Masters[0].Background.FillFormat.FillType = FillTypeEx.Solid;
pres.Masters[0].Background.FillFormat.SolidFillColor.Color = Color.ForestGreen;
//Write the presentation to disk
pres.Save(mypath + "Setting Background Color of Master Slide.pptx", SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Apply Theme to Presentation/Aspose.Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Apply Theme to Presentation.pptx";
string ThemeFileName = FilePath + "Theme.pptx";
ApplyThemeToPresentation(ThemeFileName, FileName);
}
public static void ApplyThemeToPresentation(string presentationFile, string outputFile)
{
//Instantiate Presentation class to load the source presentation file
Presentation srcPres = new Presentation(presentationFile);
//Instantiate Presentation class for destination presentation (where slide is to be cloned)
Presentation destPres = new Presentation(outputFile);
//Get the desired slide from the collection of slides in the source
//presentation
ISlide SourceSlide = srcPres.Slides[0];
//Access the collection of masters in the destination presentation and the
//master slide used by the source slide
IMasterSlideCollection masters = destPres.Masters;
IMasterSlide SourceMaster = SourceSlide.LayoutSlide.MasterSlide;
//Clone the desired master slide from the source presentation to the
//collection of masters in the destination presentation
IMasterSlide iSlide = masters.AddClone(SourceMaster);
//Clone the desired slide from the source presentation with the desired master to the end of the
//collection of slides in the destination presentation
ISlideCollection slds = destPres.Slides;
slds.AddClone(SourceSlide, iSlide, true);
//Save the destination presentation to disk
destPres.Save(outputFile, SaveFormat.Pptx);
}
}
}
<|start_filename|>Examples/CSharp/Charts/SecondPlotOptionsforCharts.cs<|end_filename|>
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class SecondPlotOptionsforCharts
{
public static void Run()
{
//ExStart:SecondPlotOptionsforCharts
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create an instance of Presentation class
Presentation presentation = new Presentation();
// Add chart on slide
IChart chart = presentation.Slides[0].Shapes.AddChart(ChartType.PieOfPie, 50, 50, 500, 400);
// Configure the pie-of-pie plot: show data labels, set the second pie size, and split data points into it by percentage
chart.ChartData.Series[0].Labels.DefaultDataLabelFormat.ShowValue = true;
chart.ChartData.Series[0].ParentSeriesGroup.SecondPieSize = 149;
chart.ChartData.Series[0].ParentSeriesGroup.PieSplitBy = Aspose.Slides.Charts.PieSplitType.ByPercentage;
chart.ChartData.Series[0].ParentSeriesGroup.PieSplitPosition = 53;
// Write presentation to disk
presentation.Save(dataDir + "SecondPlotOptionsforCharts_out.pptx", SaveFormat.Pptx);
//ExEnd:SecondPlotOptionsforCharts
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Conversion/ConvertPresentationToPasswordProtectedPDF.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Conversion
{
class ConvertPresentationToPasswordProtectedPDF
{
public static void Run()
{
//ExStart:ConvertPresentationToPasswordProtectedPDF
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Conversion();
// Instantiate a Presentation object that represents a presentation file
using (Presentation presentation = new Presentation(dataDir + "DemoFile.pptx"))
{
// Instantiate the PdfOptions class
PdfOptions pdfOptions = new PdfOptions();
// Setting PDF password
pdfOptions.Password = "password";
// Save the presentation to password protected PDF
presentation.Save(dataDir + "PasswordProtectedPDF_out.pdf", SaveFormat.Pdf, pdfOptions);
}
//ExEnd:ConvertPresentationToPasswordProtectedPDF
}
}
}
<|start_filename|>Examples/CSharp/Slides/CRUD/CreateSlidesSVGImage.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.CRUD
{
public class CreateSlidesSVGImage
{
public static void Run()
{
//ExStart:CreateSlidesSVGImage
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_CRUD();
// Instantiate a Presentation class that represents the presentation file
using (Presentation pres = new Presentation(dataDir + "CreateSlidesSVGImage.pptx"))
{
// Access the first slide
ISlide sld = pres.Slides[0];
// Create a memory stream object
MemoryStream SvgStream = new MemoryStream();
// Generate SVG image of slide and save in memory stream
sld.WriteAsSvg(SvgStream);
SvgStream.Position = 0;
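// Resetting Position is required: WriteAsSvg leaves the stream cursor at
// the end, and the copy loop below reads from the current position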
// Save memory stream to file
using (Stream fileStream = System.IO.File.OpenWrite(dataDir + "Aspose_out.svg"))
{
byte[] buffer = new byte[8 * 1024];
int len;
while ((len = SvgStream.Read(buffer, 0, buffer.Length)) > 0)
{
fileStream.Write(buffer, 0, len);
}
}
SvgStream.Close();
}
//ExEnd:CreateSlidesSVGImage
}
}
}
<|start_filename|>Examples/CSharp/Shapes/ChangeShapeOrder.cs<|end_filename|>
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class ChangeShapeOrder
{
public static void Run()
{
//ExStart:ChangeShapeOrder
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
Presentation presentation1 = new Presentation(dataDir + "HelloWorld.pptx");
ISlide slide = presentation1.Slides[0];
IAutoShape shp3 = slide.Shapes.AddAutoShape(ShapeType.Rectangle, 200, 365, 400, 150);
shp3.FillFormat.FillType = FillType.NoFill;
shp3.AddTextFrame(" ");
ITextFrame txtFrame = shp3.TextFrame;
IParagraph para = txtFrame.Paragraphs[0];
IPortion portion = para.Portions[0];
portion.Text="Watermark Text Watermark Text Watermark Text";
shp3 = slide.Shapes.AddAutoShape(ShapeType.Triangle, 200, 365, 400, 150);
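// Reorder moves the triangle to z-order position 2 in the shape collection,
// placing it above the shapes that were added earlier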
slide.Shapes.Reorder(2, shp3);
presentation1.Save(dataDir + "Reshape_out.pptx", SaveFormat.Pptx);
//ExEnd:ChangeShapeOrder
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Conversion from ODP to PPTX/Converting From and To ODP/Program.cs<|end_filename|>
using Aspose.Slides.Pptx;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Converting_From_and_To_ODP
{
class Program
{
static void Main(string[] args)
{
ConvertedFromOdp();
ConvertedToOdp();
}
public static void ConvertedToOdp()
{
//Instantiate a Presentation object that represents a presentation file
using (PresentationEx pres = new PresentationEx("ConversionFromPresentation.pptx"))
{
//Saving the presentation in ODP format
pres.Save("ConvertedToOdp.odp", Aspose.Slides.Export.SaveFormat.Odp);
}
}
public static void ConvertedFromOdp()
{
//Instantiate a Presentation object that represents a presentation file
using(PresentationEx pres = new PresentationEx("OpenOfficePresentation.odp"))
{
//Saving the ODP presentation in PPTX format
pres.Save("ConvertedFromOdp.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Properties/UpdatePresentationPropertiesUsingNewTemplate.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations
{
public class UpdatePresentationPropertiesUsingNewTemplate
{
//ExStart:UpdatePresentationPropertiesUsingNewTemplate
public static void Run()
{
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationProperties();
DocumentProperties template = new DocumentProperties();
template.Author = "Template Author";
template.Title = "Template Title";
template.Category = "Template Category";
template.Keywords = "Keyword1, Keyword2, Keyword3";
template.Company = "Our Company";
template.Comments = "Created from template";
template.ContentType = "Template Content";
template.Subject = "Template Subject";
UpdateByTemplate(dataDir + "doc1.pptx", template);
UpdateByTemplate(dataDir + "doc2.odp", template);
UpdateByTemplate(dataDir + "doc3.ppt", template);
}
private static void UpdateByTemplate(string path, IDocumentProperties template)
{
IPresentationInfo toUpdate = PresentationFactory.Instance.GetPresentationInfo(path);
toUpdate.UpdateDocumentProperties(template);
toUpdate.WriteBindedPresentation(path);
}
//ExEnd:UpdatePresentationPropertiesUsingNewTemplate
}
}
<|start_filename|>Plugins/OpenXML/Missing Features/Converting to Tiff as defined format/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
using Aspose.Slides.Export;
using System.Drawing;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\Sample Files\";
string srcFileName = FilePath + "Conversion.pptx";
string destFileName = FilePath + "Converting to Tiff as defined format.tiff";
//Instantiate a Presentation object that represents a Presentation file
Presentation pres = new Presentation(srcFileName);
//Instantiate the TiffOptions class
Aspose.Slides.Export.TiffOptions opts = new Aspose.Slides.Export.TiffOptions();
//Setting compression type
opts.CompressionType = TiffCompressionTypes.Default;
//Compression Types
//Default - Specifies the default compression scheme (LZW).
//None - Specifies no compression.
//CCITT3
//CCITT4
//LZW
//RLE
//Depth - depends on the compression type and cannot be set manually.
//Resolution unit - is always equal to "2" (dots per inch)
//Setting image DPI
opts.DpiX = 200;
opts.DpiY = 100;
//Set Image Size
opts.ImageSize = new Size(1728, 1078);
//Save the presentation to TIFF with specified image size
pres.Save(destFileName, Aspose.Slides.Export.SaveFormat.Tiff, opts);
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Adding Text Dynamically using VSTO and Aspose.Slides/Aspose Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
//Create a presentation
Presentation pres = new Presentation();
//A blank slide is added by default when you create a presentation
//from the default constructor, so we don't need
//to add any blank slide
Slide sld = pres.GetSlideByPosition(1);
//Get the font index for Arial
//It is always 0 if you create presentation from
//default constructor
int arialFontIndex = 0;
//Add a textbox
//To add it, we will first add a rectangle
Shape shp = sld.Shapes.AddRectangle(1200, 800, 3200, 370);
//Hide its line
shp.LineFormat.ShowLines = false;
//Then add a textframe inside it
TextFrame tf = shp.AddTextFrame("");
//Set a text
tf.Text = "Text added dynamically";
Portion port = tf.Paragraphs[0].Portions[0];
port.FontIndex = arialFontIndex;
port.FontBold = true;
port.FontHeight = 32;
//Write the output to disk
pres.Write("outAspose.ppt");
}
}
}
<|start_filename|>Examples/CSharp/Shapes/PictureFrameFormatting.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using System.Drawing;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class PictureFrameFormatting
{
public static void Run()
{
//ExStart:PictureFrameFormatting
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Load the image that will be placed in the picture frame
System.Drawing.Image img = (System.Drawing.Image)new Bitmap(dataDir+ "aspose-logo.jpg");
IPPImage imgx = pres.Images.AddImage(img);
// Add Picture Frame with height and width equivalent of Picture
IPictureFrame pf = sld.Shapes.AddPictureFrame(ShapeType.Rectangle, 50, 150, imgx.Width, imgx.Height, imgx);
// Apply some formatting to PictureFrameEx
pf.LineFormat.FillFormat.FillType = FillType.Solid;
pf.LineFormat.FillFormat.SolidFillColor.Color = Color.Blue;
pf.LineFormat.Width = 20;
pf.Rotation = 45;
//Write the PPTX file to disk
pres.Save(dataDir + "RectPicFrameFormat_out.pptx", SaveFormat.Pptx);
}
//ExEnd:PictureFrameFormatting
}
}
}
<|start_filename|>Plugins/OpenXML/Missing Features/Working With Size and Layout/Program.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\Sample Files\";
string FileName = FilePath + "Working With Size and Layout.pptx";
//Instantiate a Presentation object that represents a presentation file
Presentation presentation = new Presentation(FileName);
Presentation auxPresentation = new Presentation();
ISlide slide = presentation.Slides[0];
//Set the slide size of generated presentations to that of source
auxPresentation.SlideSize.Type = presentation.SlideSize.Type;
auxPresentation.SlideSize.Size = presentation.SlideSize.Size;
auxPresentation.Slides.InsertClone(0, slide);
auxPresentation.Slides.RemoveAt(0);
//Save Presentation to disk
auxPresentation.Save(FileName, Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
<|start_filename|>Plugins/OpenXML/Missing Features/Getting the format of a file/Program.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\Sample Files\";
string FileName = FilePath + "Getting the format of a file.pptx";
IPresentationInfo info;
info = PresentationFactory.Instance.GetPresentationInfo(FileName);
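// GetPresentationInfo inspects the file header without fully loading the
// presentation, so branching on LoadFormat is a cheap pre-flight check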
switch (info.LoadFormat)
{
case LoadFormat.Pptx:
{
break;
}
case LoadFormat.Unknown:
{
break;
}
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Create and Embed an Excel Chart as an OLE Object/Aspose Slides/Program.cs<|end_filename|>
using Aspose.Cells;
using Aspose.Cells.Charts;
using Aspose.Slides.Pptx;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
//Create a workbook
Workbook wb = new Workbook();
//Add an excel chart
int chartSheetIndex = AddExcelChartInWorkbook(wb);
wb.Worksheets.SetOleSize(0, 5, 0, 5);
Bitmap imgChart = wb.Worksheets[chartSheetIndex].Charts[0].ToImage();
//Save the workbook to stream
MemoryStream wbStream = wb.SaveToStream();
//Create a presentation
PresentationEx pres = new PresentationEx();
SlideEx sld = pres.Slides[0];
//Add the workbook on slide
AddExcelChartInPresentation(pres, sld, wbStream, imgChart);
//Write the output presentation on disk
pres.Write("chart.pptx");
}
static int AddExcelChartInWorkbook(Workbook wb)
{
//Add a new worksheet to populate cells with data
int dataSheetIdx = wb.Worksheets.Add();
Worksheet dataSheet = wb.Worksheets[dataSheetIdx];
string sheetName = "DataSheet";
dataSheet.Name = sheetName;
//Populate DataSheet with data
dataSheet.Cells["A2"].PutValue("N. America");
dataSheet.Cells["A3"].PutValue("S. America");
dataSheet.Cells["A4"].PutValue("Europe");
dataSheet.Cells["A5"].PutValue("Asia");
dataSheet.Cells["B1"].PutValue("Q1");
dataSheet.Cells["B2"].PutValue(1.5);
dataSheet.Cells["B3"].PutValue(2);
dataSheet.Cells["B4"].PutValue(2.25);
dataSheet.Cells["B5"].PutValue(2.5);
dataSheet.Cells["C1"].PutValue("Q2");
dataSheet.Cells["C2"].PutValue(2);
dataSheet.Cells["C3"].PutValue(1.75);
dataSheet.Cells["C4"].PutValue(2);
dataSheet.Cells["C5"].PutValue(2.5);
dataSheet.Cells["D1"].PutValue("Q3");
dataSheet.Cells["D2"].PutValue(1.5);
dataSheet.Cells["D3"].PutValue(2);
dataSheet.Cells["D4"].PutValue(2.5);
dataSheet.Cells["D5"].PutValue(2);
dataSheet.Cells["E1"].PutValue("Q4");
dataSheet.Cells["E2"].PutValue(2.5);
dataSheet.Cells["E3"].PutValue(2);
dataSheet.Cells["E4"].PutValue(2);
dataSheet.Cells["E5"].PutValue(2.75);
//Add a chart sheet
int chartSheetIdx = wb.Worksheets.Add(SheetType.Chart);
Worksheet chartSheet = wb.Worksheets[chartSheetIdx];
chartSheet.Name = "ChartSheet";
//Add a chart in ChartSheet with data series from DataSheet
int chartIdx = chartSheet.Charts.Add(ChartType.Column3DClustered, 0, 5, 0, 5);
Aspose.Cells.Charts.Chart chart = chartSheet.Charts[chartIdx];
chart.NSeries.Add(sheetName + "!A1:E5", false);
//Setting Chart's Title
chart.Title.Text = "Sales by Quarter";
//Setting the foreground color of the plot area
chart.PlotArea.Area.ForegroundColor = Color.White;
//Setting the background color of the plot area
chart.PlotArea.Area.BackgroundColor = Color.White;
//Setting the foreground color of the chart area
chart.ChartArea.Area.BackgroundColor = Color.White;
chart.Title.TextFont.Size = 16;
//Setting the title of category axis of the chart
chart.CategoryAxis.Title.Text = "Fiscal Quarter";
//Setting the title of value axis of the chart
chart.ValueAxis.Title.Text = "Billions";
//Set ChartSheet as the active sheet
wb.Worksheets.ActiveSheetIndex = chartSheetIdx;
return chartSheetIdx;
}
private static void AddExcelChartInPresentation(PresentationEx pres, SlideEx sld, Stream wbStream, Bitmap imgChart)
{
float oleWidth = pres.SlideSize.Size.Width;
float oleHeight = pres.SlideSize.Size.Height;
int x = 0;
byte[] chartOleData = new byte[wbStream.Length];
wbStream.Position = 0;
wbStream.Read(chartOleData, 0, chartOleData.Length);
OleObjectFrameEx oof = null;
oof = sld.Shapes.AddOleObjectFrame(x, 0, oleWidth, oleHeight, "Excel.Sheet.8", chartOleData);
oof.Image = pres.Images.AddImage((System.Drawing.Image)imgChart);
}
}
}
<|start_filename|>Examples/CSharp/Charts/SetDataRange.cs<|end_filename|>
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class SetDataRange
{
public static void Run()
{
//ExStart:SetDataRange
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Instantiate Presentation class that represents PPTX file
Presentation presentation = new Presentation(dataDir + "ExistingChart.pptx");
// Access the first slide and get a reference to the existing chart
ISlide slide = presentation.Slides[0];
IChart chart = (IChart)slide.Shapes[0];
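// Point the chart at a new data range; the address must refer to cells that exist in the chart's underlying data workbook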
chart.ChartData.SetRange("Sheet1!A1:B4");
presentation.Save(dataDir + "SetDataRange_out.pptx", SaveFormat.Pptx);
//ExEnd:SetDataRange
}
}
}
<|start_filename|>Examples/CSharp/Shapes/RotatingShapes.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class RotatingShapes
{
public static void Run()
{
//ExStart:RotatingShapes
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of rectangle type
IShape shp = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 150, 75, 150);
// Rotate the shape by 90 degrees
shp.Rotation = 90;
// Write the PPTX file to disk
pres.Save(dataDir + "RectShpRot_out.pptx", SaveFormat.Pptx);
}
//ExEnd:RotatingShapes
}
}
}
<|start_filename|>Examples/CSharp/Charts/AnimatingSeries.cs<|end_filename|>
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides.Animation;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class AnimatingSeries
{
public static void Run()
{
//ExStart:AnimatingSeries
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Instantiate Presentation class that represents a presentation file
using (Presentation presentation = new Presentation(dataDir + "ExistingChart.pptx"))
{
// Get reference of the chart object
var slide = presentation.Slides[0] as Slide;
var shapes = slide.Shapes as ShapeCollection;
var chart = shapes[0] as IChart;
// Animate the series
slide.Timeline.MainSequence.AddEffect(chart, EffectType.Fade, EffectSubtype.None,
EffectTriggerType.AfterPrevious);
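// Then add one Appear effect per data series (series indices 0 through 3), grouped by series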
((Sequence)slide.Timeline.MainSequence).AddEffect(chart,
EffectChartMajorGroupingType.BySeries, 0,
EffectType.Appear, EffectSubtype.None, EffectTriggerType.AfterPrevious);
((Sequence)slide.Timeline.MainSequence).AddEffect(chart,
EffectChartMajorGroupingType.BySeries, 1,
EffectType.Appear, EffectSubtype.None, EffectTriggerType.AfterPrevious);
((Sequence)slide.Timeline.MainSequence).AddEffect(chart,
EffectChartMajorGroupingType.BySeries, 2,
EffectType.Appear, EffectSubtype.None, EffectTriggerType.AfterPrevious);
((Sequence)slide.Timeline.MainSequence).AddEffect(chart,
EffectChartMajorGroupingType.BySeries, 3,
EffectType.Appear, EffectSubtype.None, EffectTriggerType.AfterPrevious);
// Write the modified presentation to disk
presentation.Save(dataDir + "AnimatingSeries_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AnimatingSeries
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Format Text using VSTO and Aspose.Slides/VSTO Slides/ThisAddIn.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using Office = Microsoft.Office.Core;
namespace VSTO_Slides
{
public partial class ThisAddIn
{
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
PowerPoint.Presentation pres = null;
//Open the presentation
pres = Globals.ThisAddIn.Application.Presentations.Open("source.ppt",
Microsoft.Office.Core.MsoTriState.msoFalse,
Microsoft.Office.Core.MsoTriState.msoFalse,
Microsoft.Office.Core.MsoTriState.msoTrue);
//Access the first slide (PowerPoint interop collections are 1-based)
PowerPoint.Slide slide = pres.Slides[1];
//Access the third shape
PowerPoint.Shape shp = slide.Shapes[3];
//Change its text's font to Verdana and height to 32
PowerPoint.TextRange txtRange = shp.TextFrame.TextRange;
txtRange.Font.Name = "Verdana";
txtRange.Font.Size = 32;
//Make it bold
txtRange.Font.Bold = Microsoft.Office.Core.MsoTriState.msoCTrue;
//Italicize it
txtRange.Font.Italic = Microsoft.Office.Core.MsoTriState.msoCTrue;
//Change text color
txtRange.Font.Color.RGB = 0x00CC3333;
//Change shape background color
shp.Fill.ForeColor.RGB = 0x00FFCCCC;
//Reposition it horizontally
shp.Left -= 70;
//Write the output to disk
pres.SaveAs("outVSTO.ppt",
PowerPoint.PpSaveAsFileType.ppSaveAsPresentation,
Microsoft.Office.Core.MsoTriState.msoFalse);
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Fill Color of a Shape/Aspose.Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using Aspose.Slides.Export;
using System.Drawing;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Fill color of a shape.pptx";
//Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
//Get the first slide
ISlide sld = pres.Slides[0];
//Add autoshape of rectangle type
IShape shp = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 150, 75, 150);
//Set the fill type to Solid
shp.FillFormat.FillType = FillType.Solid;
//Set the color of the rectangle
shp.FillFormat.SolidFillColor.Color = Color.Yellow;
//Write the PPTX file to disk
pres.Save(FileName, SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Examples/CSharp/Charts/SetlegendCustomOptions.cs<|end_filename|>
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class SetlegendCustomOptions
{
public static void Run()
{
//ExStart:SetlegendCustomOptions
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create an instance of Presentation class
Presentation presentation = new Presentation();
// Get reference of the slide
ISlide slide = presentation.Slides[0];
// Add a clustered column chart on the slide
IChart chart = slide.Shapes.AddChart(ChartType.ClusteredColumn, 50, 50, 500, 500);
// Set Legend Properties
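// X, Y, Width and Height are relative values, i.e. fractions of the chart's own size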
chart.Legend.X = 50 / chart.Width;
chart.Legend.Y = 50 / chart.Height;
chart.Legend.Width = 100 / chart.Width;
chart.Legend.Height = 100 / chart.Height;
// Write presentation to disk
presentation.Save(dataDir + "Legend_out.pptx", SaveFormat.Pptx);
//ExEnd:SetlegendCustomOptions
}
}
}
<|start_filename|>Examples/CSharp/Shapes/ConnectShapesUsingConnectors.cs<|end_filename|>
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class ConnectShapesUsingConnectors
{
public static void Run()
{
//ExStart:ConnectShapesUsingConnectors
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Instantiate Presentation class that represents the PPTX file
using (Presentation input = new Presentation())
{
// Accessing shapes collection for selected slide
IShapeCollection shapes = input.Slides[0].Shapes;
// Add autoshape Ellipse
IAutoShape ellipse = shapes.AddAutoShape(ShapeType.Ellipse, 0, 100, 100, 100);
// Add autoshape Rectangle
IAutoShape rectangle = shapes.AddAutoShape(ShapeType.Rectangle, 100, 300, 100, 100);
// Adding connector shape to slide shape collection
IConnector connector = shapes.AddConnector(ShapeType.BentConnector2, 0, 0, 10, 10);
// Joining Shapes to connectors
connector.StartShapeConnectedTo = ellipse;
connector.EndShapeConnectedTo = rectangle;
// Call reroute to set the automatic shortest path between shapes
connector.Reroute();
// Saving presentation
input.Save(dataDir + "Connecting shapes using connectors_out.pptx", SaveFormat.Pptx);
}
//ExEnd:ConnectShapesUsingConnectors
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Properties/AccessBuiltinProperties.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations
{
public class AccessBuiltinProperties
{
public static void Run()
{
//ExStart:AccessBuiltinProperties
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationProperties();
// Instantiate the Presentation class that represents the presentation
Presentation pres = new Presentation(dataDir + "AccessBuiltin Properties.pptx");
// Create a reference to IDocumentProperties object associated with Presentation
IDocumentProperties documentProperties = pres.DocumentProperties;
// Display the builtin properties
System.Console.WriteLine("Category : " + documentProperties.Category);
System.Console.WriteLine("Current Status : " + documentProperties.ContentStatus);
System.Console.WriteLine("Creation Date : " + documentProperties.CreatedTime);
System.Console.WriteLine("Author : " + documentProperties.Author);
System.Console.WriteLine("Description : " + documentProperties.Comments);
System.Console.WriteLine("KeyWords : " + documentProperties.Keywords);
System.Console.WriteLine("Last Modified By : " + documentProperties.LastSavedBy);
System.Console.WriteLine("Supervisor : " + documentProperties.Manager);
System.Console.WriteLine("Modified Date : " + documentProperties.LastSavedTime);
System.Console.WriteLine("Presentation Format : " + documentProperties.PresentationFormat);
System.Console.WriteLine("Last Print Date : " + documentProperties.LastPrinted);
System.Console.WriteLine("Is Shared between producers : " + documentProperties.SharedDoc);
System.Console.WriteLine("Subject : " + documentProperties.Subject);
System.Console.WriteLine("Title : " + documentProperties.Title);
//ExEnd:AccessBuiltinProperties
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Opening/OpenPasswordPresentation.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Opening
{
public class OpenPasswordPresentation
{
public static void Run()
{
//ExStart:OpenPasswordPresentation
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationOpening();
// Creating an instance of LoadOptions to set the presentation access password
LoadOptions loadOptions = new LoadOptions();
// Setting the access password
loadOptions.Password = "<PASSWORD>";
// Opening the presentation file by passing the file path and load options to the constructor of Presentation class
Presentation pres = new Presentation(dataDir + "OpenPasswordPresentation.pptx", loadOptions);
// Printing the total number of slides present in the presentation
System.Console.WriteLine(pres.Slides.Count.ToString());
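// Note: Presentation implements IDisposable; production code should wrap it in a using block
// or call Dispose when finished (omitted here to keep the sample minimal)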
//ExEnd:OpenPasswordPresentation
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Opening/VerifyingPresentationWithoutloading.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Opening
{
class VerifyingPresentationWithoutloading
{
public static void Run()
{
//ExStart:VerifyingPresentationWithoutloading
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationOpening();
LoadFormat format = PresentationFactory.Instance.GetPresentationInfo(dataDir + "HelloWorld.pptx").LoadFormat;
// GetPresentationInfo returns LoadFormat.Unknown if the file is not in a recognized presentation format
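// A minimal sketch of acting on the detected format (the console output is an assumption, not part of the original sample):
if (format == LoadFormat.Unknown)
System.Console.WriteLine("The file is not in a recognized presentation format.");
else
System.Console.WriteLine("Detected presentation format: " + format);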
//ExEnd:VerifyingPresentationWithoutloading
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Add Picture Frame/Aspose.Slides/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose.Slides
{
class Program
{
static void Main(string[] args)
{
string ImageFilePath = @"E:\Aspose\Aspose Vs VSTO\Aspose.Slides Vs VSTO Presentations v 1.1\Sample Files\AddPicture.jpg";
//Instantiate Presentation class that represents the PPTX
Presentation pres = new Presentation();
//Get the first slide
ISlide sld = pres.Slides[0];
//Load the image and add it to the presentation's image collection
Image img = (Image)new Bitmap(ImageFilePath);
IPPImage imgx = pres.Images.AddImage(img);
//Add Picture Frame with height and width equivalent of Picture
sld.Shapes.AddPictureFrame(ShapeType.Rectangle, 50, 150, imgx.Width, imgx.Height, imgx);
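//The original sample omits the save step; a minimal sketch (output file name assumed):
pres.Save("AddPictureFrame_out.pptx", Aspose.Slides.Export.SaveFormat.Pptx);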
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Adding Slide to Presentation/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Adding Slide to Presentation.pptx";
//Instantiate Presentation class that represents the PPTX file
Presentation pres = new Presentation();
//A blank slide is added by default when you create
//a presentation from the default constructor
//Add an empty slide to the presentation and get a reference
//to that empty slide
ISlide slide = pres.Slides.AddEmptySlide(pres.LayoutSlides[0]);
//Write the output to disk
pres.Save(FileName, Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Create a Chart/Aspose Slides/Program.cs<|end_filename|>
using Aspose.Slides.Pptx;
using Aspose.Slides.Pptx.Charts;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
//Create empty presentation
using (PresentationEx pres = new PresentationEx())
{
//Accessing first slide
SlideEx slide = pres.Slides[0];
//Adding default chart
ChartEx ppChart = slide.Shapes.AddChart(ChartTypeEx.ClusteredColumn3D, 20F, 30F, 400F, 300F);
//Getting Chart data
ChartDataEx chartData = ppChart.ChartData;
//Removing Extra default series
chartData.Series.RemoveAt(1);
chartData.Series.RemoveAt(1);
//Modifying chart categories names
chartData.Categories[0].ChartDataCell.Value = "Bikes";
chartData.Categories[1].ChartDataCell.Value = "Accessories";
chartData.Categories[2].ChartDataCell.Value = "Repairs";
chartData.Categories[3].ChartDataCell.Value = "Clothing";
//Modifying chart series values for first category
chartData.Series[0].Values[0].Value = 1000;
chartData.Series[0].Values[1].Value = 2500;
chartData.Series[0].Values[2].Value = 4000;
chartData.Series[0].Values[3].Value = 3000;
//Setting Chart title
ppChart.HasTitle = true;
ppChart.ChartTitle.Text.Text = "2007 Sales";
PortionFormatEx format = ppChart.ChartTitle.Text.Paragraphs[0].Portions[0].PortionFormat;
format.FontItalic = NullableBool.True;
format.FontHeight = 18;
format.FillFormat.FillType = FillTypeEx.Solid;
format.FillFormat.SolidFillColor.Color = Color.Black;
//Setting Axis values
ppChart.ValueAxis.IsAutomaticMaxValue = false;
ppChart.ValueAxis.IsAutomaticMinValue = false;
ppChart.ValueAxis.IsAutomaticMajorUnit = false;
ppChart.ValueAxis.IsAutomaticMinorUnit = false;
ppChart.ValueAxis.MaxValue = 4000.0F;
ppChart.ValueAxis.MinValue = 0.0F;
ppChart.ValueAxis.MajorUnit = 2000.0F;
ppChart.ValueAxis.MinorUnit = 1000.0F;
ppChart.ValueAxis.TickLabelPosition = TickLabelPositionType.NextTo;
//Setting Chart rotation
ppChart.Rotation3D.RotationX = 15;
ppChart.Rotation3D.RotationY = 20;
//Saving Presentation
pres.Write("AsposeSampleChart.pptx");
}
}
}
}
<|start_filename|>Examples/CSharp/Slides/CRUD/ChangePosition.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.CRUD
{
public class ChangePosition
{
public static void Run()
{
//ExStart:ChangePosition
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_CRUD();
// Instantiate Presentation class to load the source presentation file
using (Presentation pres = new Presentation(dataDir + "ChangePosition.pptx"))
{
// Get the slide whose position is to be changed
ISlide sld = pres.Slides[0];
// Set the new position for the slide
sld.SlideNumber = 2;
// Write the presentation to disk
pres.Save(dataDir + "Aspose_out.pptx", SaveFormat.Pptx);
}
//ExEnd:ChangePosition
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Get all the External Hyperlinks/Aspose.Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using System;
using System.Collections.Generic;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Get all the External Eyperlinks.pptx";
//Instantiate a Presentation object that represents a PPTX file
Presentation pres = new Presentation(FileName);
//Get the hyperlinks from presentation
IList<IHyperlinkContainer> links = pres.HyperlinkQueries.GetAnyHyperlinks();
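//Note: HyperlinkClick may be null for containers that only define a mouse-over hyperlink;
//a production version would check for null before reading ExternalUrl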
foreach (IHyperlinkContainer link in links)
Console.WriteLine(link.HyperlinkClick.ExternalUrl);
}
}
}
<|start_filename|>Examples/CSharp/Charts/PieChart.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class PieChart
{
public static void Run()
{
//ExStart:PieChart
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Instantiate Presentation class that represents PPTX file
Presentation presentation = new Presentation();
// Access first slide
ISlide slides = presentation.Slides[0];
// Add chart with default data
IChart chart = slides.Shapes.AddChart(ChartType.Pie, 100, 100, 400, 400);
// Setting chart Title
chart.ChartTitle.AddTextFrameForOverriding("Sample Title");
chart.ChartTitle.TextFrameForOverriding.TextFrameFormat.CenterText = NullableBool.True;
chart.ChartTitle.Height = 20;
chart.HasTitle = true;
// Set first series to Show Values
chart.ChartData.Series[0].Labels.DefaultDataLabelFormat.ShowValue = true;
// Setting the index of chart data sheet
int defaultWorksheetIndex = 0;
// Getting the chart data worksheet
IChartDataWorkbook fact = chart.ChartData.ChartDataWorkbook;
// Delete default generated series and categories
chart.ChartData.Series.Clear();
chart.ChartData.Categories.Clear();
// Adding new categories
chart.ChartData.Categories.Add(fact.GetCell(0, 1, 0, "First Qtr"));
chart.ChartData.Categories.Add(fact.GetCell(0, 2, 0, "2nd Qtr"));
chart.ChartData.Categories.Add(fact.GetCell(0, 3, 0, "3rd Qtr"));
// Adding new series
IChartSeries series = chart.ChartData.Series.Add(fact.GetCell(0, 0, 1, "Series 1"), chart.Type);
// Now populating series data
series.DataPoints.AddDataPointForPieSeries(fact.GetCell(defaultWorksheetIndex, 1, 1, 20));
series.DataPoints.AddDataPointForPieSeries(fact.GetCell(defaultWorksheetIndex, 2, 1, 50));
series.DataPoints.AddDataPointForPieSeries(fact.GetCell(defaultWorksheetIndex, 3, 1, 30));
// Adding new points and setting sector color
// Note: series.IsColorVaried is not supported in newer versions; set the flag on the parent series group instead
chart.ChartData.SeriesGroups[0].IsColorVaried = true;
IChartDataPoint point = series.DataPoints[0];
point.Format.Fill.FillType = FillType.Solid;
point.Format.Fill.SolidFillColor.Color = Color.Cyan;
// Setting Sector border
point.Format.Line.FillFormat.FillType = FillType.Solid;
point.Format.Line.FillFormat.SolidFillColor.Color = Color.Gray;
point.Format.Line.Width = 3.0;
point.Format.Line.Style = LineStyle.ThinThick;
point.Format.Line.DashStyle = LineDashStyle.DashDot;
IChartDataPoint point1 = series.DataPoints[1];
point1.Format.Fill.FillType = FillType.Solid;
point1.Format.Fill.SolidFillColor.Color = Color.Brown;
// Setting Sector border
point1.Format.Line.FillFormat.FillType = FillType.Solid;
point1.Format.Line.FillFormat.SolidFillColor.Color = Color.Blue;
point1.Format.Line.Width = 3.0;
point1.Format.Line.Style = LineStyle.Single;
point1.Format.Line.DashStyle = LineDashStyle.LargeDashDot;
IChartDataPoint point2 = series.DataPoints[2];
point2.Format.Fill.FillType = FillType.Solid;
point2.Format.Fill.SolidFillColor.Color = Color.Coral;
// Setting Sector border
point2.Format.Line.FillFormat.FillType = FillType.Solid;
point2.Format.Line.FillFormat.SolidFillColor.Color = Color.Red;
point2.Format.Line.Width = 2.0;
point2.Format.Line.Style = LineStyle.ThinThin;
point2.Format.Line.DashStyle = LineDashStyle.LargeDashDotDot;
// Create custom labels for each of categories for new series
IDataLabel lbl1 = series.DataPoints[0].Label;
lbl1.DataLabelFormat.ShowValue = true;
IDataLabel lbl2 = series.DataPoints[1].Label;
lbl2.DataLabelFormat.ShowValue = true;
lbl2.DataLabelFormat.ShowLegendKey = true;
lbl2.DataLabelFormat.ShowPercentage = true;
IDataLabel lbl3 = series.DataPoints[2].Label;
lbl3.DataLabelFormat.ShowSeriesName = true;
lbl3.DataLabelFormat.ShowPercentage = true;
// Showing Leader Lines for Chart
series.Labels.DefaultDataLabelFormat.ShowLeaderLines = true;
// Setting Rotation Angle for Pie Chart Sectors
chart.ChartData.SeriesGroups[0].FirstSliceAngle = 180;
// Save presentation with chart
presentation.Save(dataDir + "PieChart_out.pptx", SaveFormat.Pptx);
//ExEnd:PieChart
}
}
}
<|start_filename|>Examples/CSharp/Slides/CRUD/CloneToAnotherPresentationWithMaster.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.CRUD
{
public class CloneToAnotherPresentationWithMaster
{
public static void Run()
{
//ExStart:CloneToAnotherPresentationWithMaster
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_CRUD();
// Instantiate Presentation class to load the source presentation file
using (Presentation srcPres = new Presentation(dataDir + "CloneToAnotherPresentationWithMaster.pptx"))
{
// Instantiate Presentation class for destination presentation (where slide is to be cloned)
using (Presentation destPres = new Presentation())
{
// Instantiate ISlide from the collection of slides in source presentation along with
// Master slide
ISlide SourceSlide = srcPres.Slides[0];
IMasterSlide SourceMaster = SourceSlide.LayoutSlide.MasterSlide;
// Clone the desired master slide from the source presentation to the collection of masters in the
// Destination presentation
IMasterSlideCollection masters = destPres.Masters;
IMasterSlide iSlide = masters.AddClone(SourceMaster);
// Clone the desired slide from the source presentation with the desired master to the end of the
// Collection of slides in the destination presentation
ISlideCollection slds = destPres.Slides;
slds.AddClone(SourceSlide, iSlide, true);
// Save the destination presentation to disk
destPres.Save(dataDir + "CloneToAnotherPresentationWithMaster_out.pptx", SaveFormat.Pptx);
}
}
//ExEnd:CloneToAnotherPresentationWithMaster
}
}
}
<|start_filename|>Examples/CSharp/Charts/ChangeChartCategoryAxis.cs<|end_filename|>
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class ChangeChartCategoryAxis
{
public static void Run()
{
//ExStart:ChangeChartCategoryAxis
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
using (Presentation presentation = new Presentation(dataDir + "ExistingChart.pptx"))
{
IChart chart = presentation.Slides[0].Shapes[0] as IChart;
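// Switch the horizontal axis to a date axis so the major unit below can be scaled in months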
chart.Axes.HorizontalAxis.CategoryAxisType = CategoryAxisType.Date;
chart.Axes.HorizontalAxis.IsAutomaticMajorUnit = false;
chart.Axes.HorizontalAxis.MajorUnit = 1;
chart.Axes.HorizontalAxis.MajorUnitScale = TimeUnitType.Months;
presentation.Save(dataDir + "ChangeChartCategoryAxis_out.pptx", SaveFormat.Pptx);
}
//ExEnd:ChangeChartCategoryAxis
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Opening/GetPositionCoordinatesofPortion.cs<|end_filename|>
using System;
using System.Drawing;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Opening
{
class GetPositionCoordinatesofPortion
{
public static void Run()
{
//ExStart:GetPositionCoordinatesofPortion
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationOpening();
using (Presentation presentation = new Presentation(dataDir + "Shapes.pptx"))
{
IAutoShape shape = (IAutoShape)presentation.Slides[0].Shapes[0];
var textFrame = (ITextFrame)shape.TextFrame;
foreach (var paragraph in textFrame.Paragraphs)
{
foreach (Portion portion in paragraph.Portions)
{
PointF point = portion.GetCoordinates();
Console.Write(Environment.NewLine + "Corrdinates X =" + point.X + " Corrdinates Y =" + point.Y);
}
}
}
//ExEnd:GetPositionCoordinatesofPortion
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Move a slide to a new position/OpenXML Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using System.Linq;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Move a slide to a new position.pptx";
MoveSlide(FileName, 1, 2);
}
// Counting the slides in the presentation.
public static int CountSlides(string presentationFile)
{
// Open the presentation as read-only.
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, false))
{
// Pass the presentation to the next CountSlides method
// and return the slide count.
return CountSlides(presentationDocument);
}
}
// Count the slides in the presentation.
public static int CountSlides(PresentationDocument presentationDocument)
{
// Check for a null document object.
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
int slidesCount = 0;
// Get the presentation part of document.
PresentationPart presentationPart = presentationDocument.PresentationPart;
// Get the slide count from the SlideParts.
if (presentationPart != null)
{
slidesCount = presentationPart.SlideParts.Count();
}
// Return the slide count to the previous method.
return slidesCount;
}
// Move a slide to a different position in the slide order in the presentation.
public static void MoveSlide(string presentationFile, int from, int to)
{
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, true))
{
MoveSlide(presentationDocument, from, to);
}
}
// Move a slide to a different position in the slide order in the presentation.
public static void MoveSlide(PresentationDocument presentationDocument, int from, int to)
{
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
// Call the CountSlides method to get the number of slides in the presentation.
int slidesCount = CountSlides(presentationDocument);
// Verify that both from and to positions are within range and different from one another.
if (from < 0 || from >= slidesCount)
{
throw new ArgumentOutOfRangeException("from");
}
if (to < 0 || to >= slidesCount || to == from)
{
throw new ArgumentOutOfRangeException("to");
}
// Get the presentation part from the presentation document.
PresentationPart presentationPart = presentationDocument.PresentationPart;
// The slide count is not zero, so the presentation must contain slides.
Presentation presentation = presentationPart.Presentation;
SlideIdList slideIdList = presentation.SlideIdList;
// Get the slide ID of the source slide.
SlideId sourceSlide = slideIdList.ChildElements[from] as SlideId;
SlideId targetSlide = null;
// Identify the position of the target slide after which to move the source slide.
if (to == 0)
{
targetSlide = null;
}
else if (from < to)
{
targetSlide = slideIdList.ChildElements[to] as SlideId;
}
else
{
targetSlide = slideIdList.ChildElements[to - 1] as SlideId;
}
// Remove the source slide from its current position.
sourceSlide.Remove();
// Insert the source slide at its new position after the target slide.
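// (a null target inserts the slide at the beginning of the slide ID list)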
slideIdList.InsertAfter(sourceSlide, targetSlide);
// Save the modified presentation.
presentation.Save();
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Delete a slide/Aspose Slides/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Delete a slide.pptx";
DeleteSlide(FileName, 1);
}
public static void DeleteSlide(string presentationFile, int slideIndex)
{
//Instantiate a Presentation object that represents a PPTX file
using (Presentation pres = new Presentation(presentationFile))
{
//Accessing a slide using its index in the slides collection
ISlide slide = pres.Slides[slideIndex];
//Removing a slide using its reference
pres.Slides.Remove(slide);
//Writing the presentation as a PPTX file
pres.Save(presentationFile, Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Get the titles of all the slides/OpenXML Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using D = DocumentFormat.OpenXml.Drawing;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Get the titles of all the slides.pptx";
foreach (string s in GetSlideTitles(FileName))
Console.WriteLine(s);
Console.ReadKey();
}
// Get a list of the titles of all the slides in the presentation.
public static IList<string> GetSlideTitles(string presentationFile)
{
// Open the presentation as read-only.
using (PresentationDocument presentationDocument =
PresentationDocument.Open(presentationFile, false))
{
return GetSlideTitles(presentationDocument);
}
}
// Get a list of the titles of all the slides in the presentation.
public static IList<string> GetSlideTitles(PresentationDocument presentationDocument)
{
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
// Get a PresentationPart object from the PresentationDocument object.
PresentationPart presentationPart = presentationDocument.PresentationPart;
if (presentationPart != null &&
presentationPart.Presentation != null)
{
// Get a Presentation object from the PresentationPart object.
Presentation presentation = presentationPart.Presentation;
if (presentation.SlideIdList != null)
{
List<string> titlesList = new List<string>();
// Get the title of each slide in the slide order.
foreach (var slideId in presentation.SlideIdList.Elements<SlideId>())
{
SlidePart slidePart = presentationPart.GetPartById(slideId.RelationshipId) as SlidePart;
// Get the slide title.
string title = GetSlideTitle(slidePart);
// An empty title can also be added.
titlesList.Add(title);
}
return titlesList;
}
}
return null;
}
// Get the title string of the slide.
public static string GetSlideTitle(SlidePart slidePart)
{
if (slidePart == null)
{
throw new ArgumentNullException("presentationDocument");
}
// Declare a paragraph separator.
string paragraphSeparator = null;
if (slidePart.Slide != null)
{
// Find all the title shapes.
var shapes = from shape in slidePart.Slide.Descendants<Shape>()
where IsTitleShape(shape)
select shape;
StringBuilder paragraphText = new StringBuilder();
foreach (var shape in shapes)
{
// Get the text in each paragraph in this shape.
foreach (var paragraph in shape.TextBody.Descendants<D.Paragraph>())
{
// Add a line break.
paragraphText.Append(paragraphSeparator);
foreach (var text in paragraph.Descendants<D.Text>())
{
paragraphText.Append(text.Text);
}
paragraphSeparator = "\n";
}
}
return paragraphText.ToString();
}
return string.Empty;
}
// Determines whether the shape is a title shape.
private static bool IsTitleShape(Shape shape)
{
var placeholderShape = shape.NonVisualShapeProperties.ApplicationNonVisualDrawingProperties.GetFirstChild<PlaceholderShape>();
if (placeholderShape != null && placeholderShape.Type != null && placeholderShape.Type.HasValue)
{
switch ((PlaceholderValues)placeholderShape.Type)
{
// Any title shape.
case PlaceholderValues.Title:
// A centered title.
case PlaceholderValues.CenteredTitle:
return true;
default:
return false;
}
}
return false;
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Find and Replace Text without Losing Format/Aspose Slides/Program.cs<|end_filename|>
using Aspose.Slides;
using Aspose.Slides.Util;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose_Slides
{
class Program
{
static void Main(string[] args)
{
findReplaceText("d", "Aspose");
}
private static void findReplaceText(string strToFind, string strToReplaceWith)
{
//Open the presentation
Presentation pres = new Presentation("mytextone.ppt");
//Get all text boxes in the presentation
ITextBox[] tb = PresentationScanner.GetAllTextBoxes(pres, false);
for (int i = 0; i < tb.Length; i++)
foreach (Paragraph para in tb[i].Paragraphs)
foreach (Portion port in para.Portions)
//Find text to be replaced
if (port.Text.Contains(strToFind))
//Replace existing text with the new text
{
string str = port.Text;
int idx = str.IndexOf(strToFind);
string strStartText = str.Substring(0, idx);
string strEndText = str.Substring(idx + strToFind.Length, str.Length - 1 - (idx + strToFind.Length - 1));
port.Text = strStartText + strToReplaceWith + strEndText;
}
pres.Write("myTextOneAspose.ppt");
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Find and Replace Text without Losing Format/VSTO Slides/ThisAddIn.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using Office = Microsoft.Office.Core;
namespace VSTO_Slides
{
public partial class ThisAddIn
{
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
findReplaceText("Aspose", "Aspose for .Net");
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
private void findReplaceText(string strToFind, string strToReplaceWith)
{
//Open the presentation
PowerPoint.Presentation pres = null;
pres = Globals.ThisAddIn.Application.Presentations.Open("mytextone.ppt",
Microsoft.Office.Core.MsoTriState.msoFalse,
Microsoft.Office.Core.MsoTriState.msoFalse,
Microsoft.Office.Core.MsoTriState.msoFalse);
//Loop through slides
foreach (PowerPoint.Slide sld in pres.Slides)
//Loop through all shapes in slide
foreach (PowerPoint.Shape shp in sld.Shapes)
{
//Access text in the shape
string str = shp.TextFrame.TextRange.Text;
//Find text to replace
if (str.Contains(strToFind))
//Replace existing text with the new text
{
int idx = str.IndexOf(strToFind);
string strStartText = str.Substring(0, idx);
string strEndText = str.Substring(idx + strToFind.Length, str.Length - 1 - (idx + strToFind.Length - 1));
shp.TextFrame.TextRange.Text = strStartText + strToReplaceWith + strEndText;
}
pres.SaveAs("MyTextOne___.ppt",
PowerPoint.PpSaveAsFileType.ppSaveAsPresentation,
Microsoft.Office.Core.MsoTriState.msoFalse);
}
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Move a Paragraph/Aspose.Slides/Program.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Move a Paragraph from One Presentation to Another 1.pptx";
string DestFileName = FilePath + "Move a Paragraph from One Presentation to Another 2.pptx";
MoveParagraphToPresentation(FileName, DestFileName);
}
// Moves a paragraph range in a TextBody shape in the source document
// to another TextBody shape in the target document.
public static void MoveParagraphToPresentation(string sourceFile, string targetFile)
{
string Text = "";
//Instantiate Presentation class that represents PPTX
Presentation sourcePres = new Presentation(sourceFile);
//Access first shape in first slide
IShape shp = sourcePres.Slides[0].Shapes[0];
if (shp.Placeholder != null)
{
//Get the text from the placeholder and clear it in the source
Text = ((IAutoShape)shp).TextFrame.Text;
((IAutoShape)shp).TextFrame.Text = "";
}
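//Open the target presentation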
Presentation destPres = new Presentation(targetFile);
//Access first shape in first slide
IShape destshp = destPres.Slides[0].Shapes[0];
if (destshp.Placeholder != null)
{
//Append the text to the target placeholder
((IAutoShape)destshp).TextFrame.Text += Text;
}
sourcePres.Save(sourceFile, Aspose.Slides.Export.SaveFormat.Pptx);
destPres.Save(targetFile, Aspose.Slides.Export.SaveFormat.Pptx);
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Properties/AccessOpenDoc.cs<|end_filename|>
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Properties
{
public class AccessOpenDoc
{
public static void Run()
{
//ExStart:AccessOpenDoc
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationProperties();
// Open the ODP file
Presentation pres = new Presentation(dataDir + "AccessOpenDoc.odp");
// Saving the ODP presentation to PPTX format
pres.Save(dataDir + "AccessOpenDoc_out.pptx", SaveFormat.Pptx);
//ExEnd:AccessOpenDoc
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Slide Thumbnail to JPEG/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using System.Drawing;
using Aspose.Slides;
namespace Slide_Thumbnail_to_JPEG
{
class Program
{
static void Main(string[] args)
{
string MyDir = @"Files\";
//Instantiate a Presentation class that represents the presentation file
using (Presentation pres = new Presentation(MyDir + "Slides Test Presentation.pptx"))
{
//Access the first slide
ISlide sld = pres.Slides[0];
//Create a full scale image
Bitmap bmp = sld.GetThumbnail(1f, 1f);
//Save the image to disk in JPEG format
bmp.Save(MyDir + "Test Thumbnail.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Create a New Presentation/VSTO Slides/ThisAddIn.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using Office = Microsoft.Office.Core;
namespace VSTO_Slides
{
public partial class ThisAddIn
{
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
PowerPoint.Presentation pres = Globals.ThisAddIn.Application
.Presentations.Add(Microsoft.Office.Core.MsoTriState.msoFalse);
//Get the title slide layout
PowerPoint.CustomLayout layout = pres.SlideMaster.
CustomLayouts[PowerPoint.PpSlideLayout.ppLayoutTitle];
//Add a title slide.
PowerPoint.Slide slide = pres.Slides.AddSlide(1, layout);
//Set the title text
slide.Shapes.Title.TextFrame.TextRange.Text = "Slide Title Heading";
//Set the sub title text
slide.Shapes[2].TextFrame.TextRange.Text = "Slide Title Sub-Heading";
//Write the output to disk
pres.SaveAs("outVSTO.ppt",
PowerPoint.PpSaveAsFileType.ppSaveAsPresentation,
Microsoft.Office.Core.MsoTriState.msoFalse);
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Examples/CSharp/Shapes/AddVideoFrame.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class AddVideoFrame
{
public static void Run()
{
//ExStart:AddVideoFrame
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add Video Frame
IVideoFrame vf = sld.Shapes.AddVideoFrame(50, 150, 300, 150, dataDir+ "video1.avi");
// Set Play Mode and Volume of the Video
vf.PlayMode = VideoPlayModePreset.Auto;
vf.Volume = AudioVolumeMode.Loud;
//Write the PPTX file to disk
pres.Save(dataDir + "VideoFrame_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AddVideoFrame
}
}
}
<|start_filename|>Examples/CSharp/Shapes/SetAlternativeText.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class SetAlternativeText
{
public static void Run()
{
//ExStart:SetAlternativeText
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Instantiate Presentation class that represents the PPTX
Presentation pres = new Presentation();
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of rectangle type
IShape shp1 = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 40, 150, 50);
IShape shp2 = sld.Shapes.AddAutoShape(ShapeType.Moon, 160, 40, 150, 50);
shp2.FillFormat.FillType = FillType.Solid;
shp2.FillFormat.SolidFillColor.Color = Color.Gray;
for (int i = 0; i < sld.Shapes.Count; i++)
{
var shape = sld.Shapes[i] as AutoShape;
if (shape != null)
{
shape.AlternativeText = "User Defined";
}
}
// Save presentation to disk
pres.Save(dataDir + "Set_AlternativeText_out.pptx", SaveFormat.Pptx);
//ExEnd:SetAlternativeText
}
}
}
<|start_filename|>Examples/CSharp/Slides/Background/SetSlideBackgroundMaster.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Export;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Slides.Background
{
public class SetSlideBackgroundMaster
{
public static void Run()
{
//ExStart:SetSlideBackgroundMaster
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Slides_Presentations_Background();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate the Presentation class that represents the presentation file
using (Presentation pres = new Presentation())
{
// Set the background color of the Master ISlide to Forest Green
pres.Masters[0].Background.Type = BackgroundType.OwnBackground;
pres.Masters[0].Background.FillFormat.FillType = FillType.Solid;
pres.Masters[0].Background.FillFormat.SolidFillColor.Color = Color.ForestGreen;
// Write the presentation to disk
pres.Save(dataDir + "SetSlideBackgroundMaster_out.pptx", SaveFormat.Pptx);
}
//ExEnd:SetSlideBackgroundMaster
}
}
}
<|start_filename|>Examples/CSharp/Shapes/AnimationsOnShapes.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Export;
using Aspose.Slides.Animation;
using System.Drawing;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class AnimationsOnShapes
{
public static void Run()
{
//ExStart:AnimationsOnShapes
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
ISlide sld = pres.Slides[0];
// Now create effect "PathFootball" for existing shape from scratch.
IAutoShape ashp = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 150, 150, 250, 25);
ashp.AddTextFrame("Animated TextBox");
// Add PathFootBall animation effect
pres.Slides[0].Timeline.MainSequence.AddEffect(ashp, EffectType.PathFootball,
EffectSubtype.None, EffectTriggerType.AfterPrevious);
// Create some kind of "button".
IShape shapeTrigger = pres.Slides[0].Shapes.AddAutoShape(ShapeType.Bevel, 10, 10, 20, 20);
// Create sequence of effects for this button.
ISequence seqInter = pres.Slides[0].Timeline.InteractiveSequences.Add(shapeTrigger);
// Create custom user path. Our object will be moved only after "button" click.
IEffect fxUserPath = seqInter.AddEffect(ashp, EffectType.PathUser, EffectSubtype.None, EffectTriggerType.OnClick);
// Created path is empty so we should add commands for moving.
IMotionEffect motionBhv = ((IMotionEffect)fxUserPath.Behaviors[0]);
PointF[] pts = new PointF[1];
pts[0] = new PointF(0.076f, 0.59f);
motionBhv.Path.Add(MotionCommandPathType.LineTo, pts, MotionPathPointsType.Auto, true);
pts[0] = new PointF(-0.076f, -0.59f);
motionBhv.Path.Add(MotionCommandPathType.LineTo, pts, MotionPathPointsType.Auto, false);
motionBhv.Path.Add(MotionCommandPathType.End, null, MotionPathPointsType.Auto, false);
//Write the presentation as PPTX to disk
pres.Save(dataDir + "AnimExample_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AnimationsOnShapes
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Saving/RemoveWriteProtection.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Saving
{
public class RemoveWriteProtection
{
public static void Run()
{
//ExStart:RemoveWriteProtection
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationSaving();
// Opening the presentation file
Presentation presentation = new Presentation(dataDir + "RemoveWriteProtection.pptx");
// Checking if presentation is write protected
if (presentation.ProtectionManager.IsWriteProtected)
// Removing Write protection
presentation.ProtectionManager.RemoveWriteProtection();
// Saving presentation
presentation.Save(dataDir + "File_Without_WriteProtection_out.pptx", Aspose.Slides.Export.SaveFormat.Pptx);
//ExEnd:RemoveWriteProtection
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Move a Paragraph/OpenXML Presentation/Program.cs<|end_filename|>
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System.Linq;
using Drawing = DocumentFormat.OpenXml.Drawing;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Move a Paragraph from One Presentation to Another 1.pptx";
string DestFileName = FilePath + "Move a Paragraph from One Presentation to Another 2.pptx";
MoveParagraphToPresentation(FileName, DestFileName);
}
// Moves a paragraph range in a TextBody shape in the source document
// to another TextBody shape in the target document.
public static void MoveParagraphToPresentation(string sourceFile, string targetFile)
{
// Open the source file as read/write.
using (PresentationDocument sourceDoc = PresentationDocument.Open(sourceFile, true))
{
// Open the target file as read/write.
using (PresentationDocument targetDoc = PresentationDocument.Open(targetFile, true))
{
// Get the first slide in the source presentation.
SlidePart slide1 = GetFirstSlide(sourceDoc);
// Get the first TextBody shape in it.
TextBody textBody1 = slide1.Slide.Descendants<TextBody>().First();
// Get the first paragraph in the TextBody shape.
// Note: "Drawing" is the alias of namespace DocumentFormat.OpenXml.Drawing
Drawing.Paragraph p1 = textBody1.Elements<Drawing.Paragraph>().First();
// Get the first slide in the target presentation.
SlidePart slide2 = GetFirstSlide(targetDoc);
// Get the first TextBody shape in it.
TextBody textBody2 = slide2.Slide.Descendants<TextBody>().First();
// Clone the source paragraph and insert the cloned paragraph into the target TextBody shape.
// Passing "true" creates a deep clone, which creates a copy of the
// Paragraph object and everything directly or indirectly referenced by that object.
textBody2.Append(p1.CloneNode(true));
// Remove the source paragraph from the source file.
textBody1.RemoveChild<Drawing.Paragraph>(p1);
// Replace the removed paragraph with a placeholder.
textBody1.AppendChild<Drawing.Paragraph>(new Drawing.Paragraph());
// Save the slide in the source file.
slide1.Slide.Save();
// Save the slide in the target file.
slide2.Slide.Save();
}
}
}
// Get the slide part of the first slide in the presentation document.
public static SlidePart GetFirstSlide(PresentationDocument presentationDocument)
{
// Get relationship ID of the first slide
PresentationPart part = presentationDocument.PresentationPart;
SlideId slideId = part.Presentation.SlideIdList.GetFirstChild<SlideId>();
string relId = slideId.RelationshipId;
// Get the slide part by the relationship ID.
SlidePart slidePart = (SlidePart)part.GetPartById(relId);
return slidePart;
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Add Table on Slide/VSTO Presentation/ThisAddIn.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml.Linq;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using Office = Microsoft.Office.Core;
namespace VSTO_Presentation
{
public partial class ThisAddIn
{
private void ThisAddIn_Startup(object sender, System.EventArgs e)
{
//Get the first slide of the active presentation (PowerPoint interop collections are 1-based)
PowerPoint.Slide sld = Application.ActivePresentation.Slides[1];
//Add a 15 x 15 table
PowerPoint.Shape shp = sld.Shapes.AddTable(15, 15, 10, 10, 200, 300);
PowerPoint.Table tbl = shp.Table;
int i = -1;
int j = -1;
//Loop through all the rows
foreach (PowerPoint.Row row in tbl.Rows)
{
i = i + 1;
j = -1;
//Loop through all the cells in the row
foreach (PowerPoint.Cell cell in row.Cells)
{
j = j + 1;
//Get text frame of each cell
PowerPoint.TextFrame tf = cell.Shape.TextFrame;
//Add some text
tf.TextRange.Text = "T" + i.ToString() + j.ToString();
//Set font size of the text as 10
tf.TextRange.Paragraphs(0, tf.TextRange.Text.Length).Font.Size = 10;
}
}
}
private void ThisAddIn_Shutdown(object sender, System.EventArgs e)
{
}
#region VSTO generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InternalStartup()
{
this.Startup += new System.EventHandler(ThisAddIn_Startup);
this.Shutdown += new System.EventHandler(ThisAddIn_Shutdown);
}
#endregion
}
}
<|start_filename|>Examples/CSharp/Charts/AddErrorBars.cs<|end_filename|>
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class AddErrorBars
{
public static void Run()
{
//ExStart:AddErrorBars
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Creating empty presentation
using (Presentation presentation = new Presentation())
{
// Creating a bubble chart
IChart chart = presentation.Slides[0].Shapes.AddChart(ChartType.Bubble, 50, 50, 400, 300, true);
// Adding Error bars and setting its format
IErrorBarsFormat errBarX = chart.ChartData.Series[0].ErrorBarsXFormat;
IErrorBarsFormat errBarY = chart.ChartData.Series[0].ErrorBarsYFormat;
errBarX.IsVisible = true;
errBarY.IsVisible = true;
errBarX.ValueType = ErrorBarValueType.Fixed;
errBarX.Value = 0.1f;
errBarY.ValueType = ErrorBarValueType.Percentage;
errBarY.Value = 5;
errBarX.Type = ErrorBarType.Plus;
errBarY.Format.Line.Width = 2;
errBarX.HasEndCap = true;
// Saving presentation
presentation.Save(dataDir + "ErrorBars_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AddErrorBars
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Aspose.Slides Features missing in VSTO/Rendering Shapes and Slide to Images/Rendering Individual Shapes as Images/Program.cs<|end_filename|>
using Aspose.Slides;
using Aspose.Slides.Pptx;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Rendering_Individual_Shapes_as_Images
{
class Program
{
static void Main(string[] args)
{
string path = @"Files\";
//Instantiate a Presentation object that represents a PPT file
Presentation pres = new Presentation(path + "RenderShapeAsImage.ppt");
//Accessing a slide using its slide position
Slide slide = pres.GetSlideByPosition(2);
//Iterate all shapes on a slide and create thumbnails
ShapeCollection shapes = slide.Shapes;
for (int i = 0; i < shapes.Count; i++)
{
Shape shape = shapes[i];
//Getting the thumbnail image of the shape
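//Note: in this legacy Aspose.Slides API, the two 1.0 arguments are presumably the X and Y scale factors,
//with shape.ShapeRectangle bounding the area that is rendered.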
Image img = slide.GetThumbnail(new object[] { shape }, 1.0, 1.0, shape.ShapeRectangle);
//Saving the thumbnail image in gif format
img.Save(path + i + ".gif", ImageFormat.Gif);
}
}
}
}
<|start_filename|>Examples/CSharp/Charts/ScatteredChart.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class ScatteredChart
{
public static void Run()
{
//ExStart:ScatteredChart
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
Presentation pres = new Presentation();
ISlide slide = pres.Slides[0];
// Creating the default chart
IChart chart = slide.Shapes.AddChart(ChartType.ScatterWithSmoothLines, 0, 0, 400, 400);
// Getting the default chart data worksheet index
int defaultWorksheetIndex = 0;
// Getting the chart data worksheet
IChartDataWorkbook fact = chart.ChartData.ChartDataWorkbook;
// Delete demo series
chart.ChartData.Series.Clear();
// Add new series
chart.ChartData.Series.Add(fact.GetCell(defaultWorksheetIndex, 1, 1, "Series 1"), chart.Type);
chart.ChartData.Series.Add(fact.GetCell(defaultWorksheetIndex, 1, 3, "Series 2"), chart.Type);
// Take first chart series
IChartSeries series = chart.ChartData.Series[0];
// Add new point (1:3) there.
series.DataPoints.AddDataPointForScatterSeries(fact.GetCell(defaultWorksheetIndex, 2, 1, 1), fact.GetCell(defaultWorksheetIndex, 2, 2, 3));
// Add new point (2:10)
series.DataPoints.AddDataPointForScatterSeries(fact.GetCell(defaultWorksheetIndex, 3, 1, 2), fact.GetCell(defaultWorksheetIndex, 3, 2, 10));
// Edit the type of series
series.Type = ChartType.ScatterWithStraightLinesAndMarkers;
// Changing the chart series marker
series.Marker.Size = 10;
series.Marker.Symbol = MarkerStyleType.Star;
// Take second chart series
series = chart.ChartData.Series[1];
// Add new point (5:2) there.
series.DataPoints.AddDataPointForScatterSeries(fact.GetCell(defaultWorksheetIndex, 2, 3, 5), fact.GetCell(defaultWorksheetIndex, 2, 4, 2));
// Add new point (3:1)
series.DataPoints.AddDataPointForScatterSeries(fact.GetCell(defaultWorksheetIndex, 3, 3, 3), fact.GetCell(defaultWorksheetIndex, 3, 4, 1));
// Add new point (2:2)
series.DataPoints.AddDataPointForScatterSeries(fact.GetCell(defaultWorksheetIndex, 4, 3, 2), fact.GetCell(defaultWorksheetIndex, 4, 4, 2));
// Add new point (5:1)
series.DataPoints.AddDataPointForScatterSeries(fact.GetCell(defaultWorksheetIndex, 5, 3, 5), fact.GetCell(defaultWorksheetIndex, 5, 4, 1));
// Changing the chart series marker
series.Marker.Size = 10;
series.Marker.Symbol = MarkerStyleType.Circle;
pres.Save(dataDir + "AsposeChart_out.pptx", SaveFormat.Pptx);
//ExEnd:ScatteredChart
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Get all the text in a slide/OpenXML Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Get all the text in a slide.pptx";
foreach (string s in GetAllTextInSlide(FileName, 0))
Console.WriteLine(s);
Console.ReadKey();
}
// Get all the text in a slide.
public static string[] GetAllTextInSlide(string presentationFile, int slideIndex)
{
// Open the presentation as read-only.
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, false))
{
// Pass the presentation and the slide index
// to the next GetAllTextInSlide method, and
// then return the array of strings it returns.
return GetAllTextInSlide(presentationDocument, slideIndex);
}
}
public static string[] GetAllTextInSlide(PresentationDocument presentationDocument, int slideIndex)
{
// Verify that the presentation document exists.
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
// Verify that the slide index is not out of range.
if (slideIndex < 0)
{
throw new ArgumentOutOfRangeException("slideIndex");
}
// Get the presentation part of the presentation document.
PresentationPart presentationPart = presentationDocument.PresentationPart;
// Verify that the presentation part and presentation exist.
if (presentationPart != null && presentationPart.Presentation != null)
{
// Get the Presentation object from the presentation part.
Presentation presentation = presentationPart.Presentation;
// Verify that the slide ID list exists.
if (presentation.SlideIdList != null)
{
// Get the collection of slide IDs from the slide ID list.
DocumentFormat.OpenXml.OpenXmlElementList slideIds =
presentation.SlideIdList.ChildElements;
// If the slide ID is in range...
if (slideIndex < slideIds.Count)
{
// Get the relationship ID of the slide.
string slidePartRelationshipId = (slideIds[slideIndex] as SlideId).RelationshipId;
// Get the specified slide part from the relationship ID.
SlidePart slidePart =
(SlidePart)presentationPart.GetPartById(slidePartRelationshipId);
// Pass the slide part to the next method, and
// then return the array of strings that method
// returns to the previous method.
return GetAllTextInSlide(slidePart);
}
}
}
// Else, return null.
return null;
}
public static string[] GetAllTextInSlide(SlidePart slidePart)
{
// Verify that the slide part exists.
if (slidePart == null)
{
throw new ArgumentNullException("slidePart");
}
// Create a new linked list of strings.
LinkedList<string> texts = new LinkedList<string>();
// If the slide exists...
if (slidePart.Slide != null)
{
// Iterate through all the paragraphs in the slide.
foreach (DocumentFormat.OpenXml.Drawing.Paragraph paragraph in
slidePart.Slide.Descendants<DocumentFormat.OpenXml.Drawing.Paragraph>())
{
// Create a new string builder.
StringBuilder paragraphText = new StringBuilder();
// Iterate through the lines of the paragraph.
foreach (DocumentFormat.OpenXml.Drawing.Text text in
paragraph.Descendants<DocumentFormat.OpenXml.Drawing.Text>())
{
// Append each line to the previous lines.
paragraphText.Append(text.Text);
}
if (paragraphText.Length > 0)
{
// Add each paragraph to the linked list.
texts.AddLast(paragraphText.ToString());
}
}
}
if (texts.Count > 0)
{
// Return an array of strings.
return texts.ToArray();
}
else
{
return null;
}
}
}
}
<|start_filename|>Examples/CSharp/Shapes/FillShapeswithSolidColor.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class FillShapeswithSolidColor
{
public static void Run()
{
//ExStart:FillShapeswithSolidColor
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create an instance of Presentation class
Presentation presentation = new Presentation();
// Get the first slide
ISlide slide = presentation.Slides[0];
// Add autoshape of rectangle type
IShape shape = slide.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 150, 75, 150);
// Set the fill type to Solid
shape.FillFormat.FillType = FillType.Solid;
// Set the color of the rectangle
shape.FillFormat.SolidFillColor.Color = Color.Yellow;
//Write the PPTX file to disk
presentation.Save(dataDir + "RectShpSolid_out.pptx", SaveFormat.Pptx);
//ExEnd:FillShapeswithSolidColor
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Delete all the comments by an author/OpenXML Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml;
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml.Presentation;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Delete all the comments by an author.pptx";
string author = "<NAME>";
DeleteCommentsByAuthorInPresentation(FileName, author);
}
// Remove all the comments in the slides by a certain author.
public static void DeleteCommentsByAuthorInPresentation(string fileName, string author)
{
if (String.IsNullOrEmpty(fileName) || String.IsNullOrEmpty(author))
throw new ArgumentNullException("File name or author name is NULL!");
using (PresentationDocument doc = PresentationDocument.Open(fileName, true))
{
// Get the specified comment author.
IEnumerable<CommentAuthor> commentAuthors =
doc.PresentationPart.CommentAuthorsPart.CommentAuthorList.Elements<CommentAuthor>()
.Where(e => e.Name.Value.Equals(author));
// Iterate through all the matching authors.
foreach (CommentAuthor commentAuthor in commentAuthors)
{
UInt32Value authorId = commentAuthor.Id;
// Iterate through all the slides and get the slide parts.
foreach (SlidePart slide in doc.PresentationPart.SlideParts)
{
SlideCommentsPart slideCommentsPart = slide.SlideCommentsPart;
// Get the list of comments.
if (slideCommentsPart != null && slide.SlideCommentsPart.CommentList != null)
{
IEnumerable<Comment> commentList =
slideCommentsPart.CommentList.Elements<Comment>().Where(e => e.AuthorId == authorId.Value);
List<Comment> comments = new List<Comment>();
comments = commentList.ToList<Comment>();
foreach (Comment comm in comments)
{
// Delete all the comments by the specified author.
slideCommentsPart.CommentList.RemoveChild<Comment>(comm);
}
// If the commentPart has no existing comment.
if (slideCommentsPart.CommentList.ChildElements.Count == 0)
// Delete this part.
slide.DeletePart(slideCommentsPart);
}
}
// Delete the comment author from the comment authors part.
doc.PresentationPart.CommentAuthorsPart.CommentAuthorList.RemoveChild<CommentAuthor>(commentAuthor);
}
}
}
}
}
<|start_filename|>Plugins/Aspose.Slides Vs VSTO Presentations/Code Comparison of Common Features/Opening a Presentation/Aspose.Slides/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aspose.Slides
{
class Program
{
static void Main(string[] args)
{
string FileName = @"E:\Aspose\Aspose Vs VSTO\Aspose.Slides Vs VSTO Presentations v 1.1\Sample Files\Open Presentation.pptx";
Presentation MyPresentation = new Presentation(FileName);
}
}
}
<|start_filename|>Examples/CSharp/Shapes/FormatLines.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using System.Drawing;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Shapes
{
public class FormatLines
{
public static void Run()
{
//ExStart:FormatLines
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate the Presentation class that represents the PPTX
using (Presentation pres = new Presentation())
{
// Get the first slide
ISlide sld = pres.Slides[0];
// Add autoshape of rectangle type
IShape shp = sld.Shapes.AddAutoShape(ShapeType.Rectangle, 50, 150, 150, 75);
// Set the fill color of the rectangle shape
shp.FillFormat.FillType = FillType.Solid;
shp.FillFormat.SolidFillColor.Color = Color.White;
// Apply some formatting on the line of the rectangle
shp.LineFormat.Style = LineStyle.ThickThin;
shp.LineFormat.Width = 7;
shp.LineFormat.DashStyle = LineDashStyle.Dash;
// Set the color of the line of rectangle
shp.LineFormat.FillFormat.FillType = FillType.Solid;
shp.LineFormat.FillFormat.SolidFillColor.Color = Color.Blue;
//Write the PPTX file to disk
pres.Save(dataDir + "RectShpLn_out.pptx", SaveFormat.Pptx);
}
//ExEnd:FormatLines
}
}
}
<|start_filename|>Examples/CSharp/Shapes/AddRelativeScaleHeightPictureFrame.cs<|end_filename|>
using System.Drawing;
using Aspose.Slides.Export;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class AddRelativeScaleHeightPictureFrame
{
public static void Run()
{
//ExStart:AddRelativeScaleHeightPictureFrame
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Instantiate presentation object
using (Presentation presentation = new Presentation())
{
// Load the image to be added to the presentation's image collection
Image img = new Bitmap(dataDir + "aspose-logo.jpg");
IPPImage image = presentation.Images.AddImage(img);
// Add picture frame to slide
IPictureFrame pf = presentation.Slides[0].Shapes.AddPictureFrame(ShapeType.Rectangle, 50, 50, 100, 100, image);
// Setting relative scale width and height
pf.RelativeScaleHeight = 0.8f;
pf.RelativeScaleWidth = 1.35f;
// Save presentation
presentation.Save(dataDir + "Adding Picture Frame with Relative Scale_out.pptx", SaveFormat.Pptx);
}
//ExEnd:AddRelativeScaleHeightPictureFrame
}
}
}
<|start_filename|>Plugins/OpenXML/Common Features/Count the number of Slides/OpenXML Presentation/Program.cs<|end_filename|>
// Copyright (c) Aspose 2002-2014. All Rights Reserved.
using DocumentFormat.OpenXml.Packaging;
using System;
using System.Linq;
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\..\Sample Files\";
string FileName = FilePath + "Count the number of slides.pptx";
Console.WriteLine("Number of slides = {0}",
CountSlides(FileName));
Console.ReadKey();
}
// Get the presentation object and pass it to the next CountSlides method.
public static int CountSlides(string presentationFile)
{
// Open the presentation as read-only.
using (PresentationDocument presentationDocument = PresentationDocument.Open(presentationFile, false))
{
// Pass the presentation to the next CountSlide method
// and return the slide count.
return CountSlides(presentationDocument);
}
}
// Count the slides in the presentation.
public static int CountSlides(PresentationDocument presentationDocument)
{
// Check for a null document object.
if (presentationDocument == null)
{
throw new ArgumentNullException("presentationDocument");
}
int slidesCount = 0;
// Get the presentation part of document.
PresentationPart presentationPart = presentationDocument.PresentationPart;
// Get the slide count from the SlideParts.
if (presentationPart != null)
{
slidesCount = presentationPart.SlideParts.Count();
}
// Return the slide count to the previous method.
return slidesCount;
}
}
}
<|start_filename|>Examples/CSharp/Presentations/Opening/GetFileFormat.cs<|end_filename|>
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Presentations.Opening
{
class GetFileFormat
{
public static void Run()
{
//ExStart:GetFileFormat
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_PresentationOpening();
IPresentationInfo info = PresentationFactory.Instance.GetPresentationInfo(dataDir + "HelloWorld.pptx");
switch (info.LoadFormat)
{
case LoadFormat.Pptx:
{
break;
}
case LoadFormat.Unknown:
{
break;
}
}
//ExEnd:GetFileFormat
}
}
}
<|start_filename|>Examples/CSharp/Shapes/CreateSmartArtChildNoteThumbnail.cs<|end_filename|>
using System.Drawing;
using System.Drawing.Imaging;
using Aspose.Slides.SmartArt;
using Aspose.Slides;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Slides for .NET API reference
when the project is built. Please check https://docs.nuget.org/consume/nuget-faq for more information.
If you do not wish to use NuGet, you can manually download Aspose.Slides for .NET API from http://www.aspose.com/downloads,
install it and then add its reference to this project. For any issues, questions or suggestions
please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Slides.Examples.CSharp.Shapes
{
class CreateSmartArtChildNoteThumbnail
{
public static void Run()
{
//ExStart:CreateSmartArtChildNoteThumbnail
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Shapes();
// Instantiate Presentation class that represents the PPTX file
Presentation pres = new Presentation();
// Add SmartArt
ISmartArt smart = pres.Slides[0].Shapes.AddSmartArt(10, 10, 400, 300, SmartArtLayoutType.BasicCycle);
// Obtain the reference of a node by using its Index
ISmartArtNode node = smart.Nodes[1];
// Get thumbnail
Bitmap bmp = node.Shapes[0].GetThumbnail();
// Save thumbnail
bmp.Save(dataDir + "SmartArt_ChildNote_Thumbnail_out.jpeg", ImageFormat.Jpeg);
//ExEnd:CreateSmartArtChildNoteThumbnail
}
}
}
<|start_filename|>Examples/CSharp/Charts/NumberFormat.cs<|end_filename|>
using System.IO;
using Aspose.Slides;
using Aspose.Slides.Charts;
using Aspose.Slides.Export;
namespace Aspose.Slides.Examples.CSharp.Charts
{
public class NumberFormat
{
public static void Run()
{
//ExStart:NumberFormat
// The path to the documents directory.
string dataDir = RunExamples.GetDataDir_Charts();
// Create directory if it is not already present.
bool IsExists = System.IO.Directory.Exists(dataDir);
if (!IsExists)
System.IO.Directory.CreateDirectory(dataDir);
// Instantiate the presentation
Presentation pres = new Presentation();
// Access the first presentation slide
ISlide slide = pres.Slides[0];
// Adding a default clustered column chart
IChart chart = slide.Shapes.AddChart(ChartType.ClusteredColumn, 50, 50, 500, 400);
// Accessing the chart series collection
IChartSeriesCollection series = chart.ChartData.Series;
// Setting the preset number format
// Traverse through every chart series
foreach (ChartSeries ser in series)
{
// Traverse through every data cell in series
foreach (IChartDataPoint cell in ser.DataPoints)
{
// Setting the number format
cell.Value.AsCell.PresetNumberFormat = 10; //0.00%
}
}
// Saving presentation
pres.Save(dataDir + "PresetNumberFormat_out.pptx", SaveFormat.Pptx);
//ExEnd:NumberFormat
}
}
} | MrYangPan/Aspose.Slides-for-.NET-code |
<|start_filename|>osu.Server.DifficultyCalculator/Commands/BeatmapsStringCommand.cs<|end_filename|>
// Copyright (c) ppy P<NAME> <<EMAIL>>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System.Collections.Generic;
using System.Linq;
using McMaster.Extensions.CommandLineUtils;
namespace osu.Server.DifficultyCalculator.Commands
{
[Command("beatmapsstring", Description = "A compatibility mode which accepts a comma-separated list of beatmap ids.")]
public class BeatmapsStringCommand : CalculatorCommand
{
[Argument(0, "beatmaps", Description = "A comma-separated list of beatmap ids.")]
public string Beatmaps { get; set; }
protected override IEnumerable<int> GetBeatmaps() => Beatmaps.Split(',').Select(int.Parse);
}
}
<|start_filename|>osu.Server.DifficultyCalculator/Commands/AllCommand.cs<|end_filename|>
// Copyright (c) p<NAME> <<EMAIL>>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System.Collections.Generic;
using System.Linq;
using Dapper;
using McMaster.Extensions.CommandLineUtils;
namespace osu.Server.DifficultyCalculator.Commands
{
[Command(Name = "all", Description = "Calculates the difficulty of all beatmaps in the database.")]
public class AllCommand : CalculatorCommand
{
[Option(CommandOptionType.NoValue, Template = "-r|--ranked", Description = "Only calculate difficulty for ranked/approved/qualified/loved maps.")]
public bool RankedOnly { get; set; }
protected override IEnumerable<int> GetBeatmaps()
{
using (var conn = Database.GetSlaveConnection())
{
if (conn == null)
return Enumerable.Empty<int>();
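// Build the WHERE clause; CombineSqlConditions is presumably a helper on the CalculatorCommand
// base class that joins the non-null conditions together.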
var condition = CombineSqlConditions(
RankedOnly ? "`approved` >= 1" : null,
"`deleted_at` IS NULL"
);
return conn.Query<int>($"SELECT `beatmap_id` FROM `osu_beatmaps` {condition}");
}
}
}
}
<|start_filename|>osu.Server.DifficultyCalculator/Program.cs<|end_filename|>
// Copyright (c) p<NAME> <<EMAIL>>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System.Net;
using McMaster.Extensions.CommandLineUtils;
using osu.Game.Beatmaps.Formats;
using osu.Server.DifficultyCalculator.Commands;
namespace osu.Server.DifficultyCalculator
{
[Command]
[Subcommand(typeof(AllCommand))]
[Subcommand(typeof(FilesCommand))]
[Subcommand(typeof(BeatmapsCommand))]
[Subcommand(typeof(SinceCommand))]
[Subcommand(typeof(BeatmapsStringCommand))]
public class Program
{
public static void Main(string[] args)
{
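// Register the legacy beatmap decoder before any beatmaps are decoded.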
LegacyDifficultyCalculatorBeatmapDecoder.Register();
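// Raise the default per-host connection limit, presumably to allow many parallel beatmap downloads.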
ServicePointManager.DefaultConnectionLimit = 128;
CommandLineApplication.Execute<Program>(args);
}
public int OnExecute(CommandLineApplication app)
{
app.ShowHelp();
return 1;
}
}
}
| ChingCdesu/osu-performance-calculator |
<|start_filename|>TWT/static/js/bs-init.js<|end_filename|>
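// On viewports narrower than 768px, strip the animation classes/attributes from any element
// flagged with data-bss-disabled-mobile so those effects stay desktop-only.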
if (window.innerWidth < 768) {
[].slice.call(document.querySelectorAll('[data-bss-disabled-mobile]')).forEach(function (elem) {
elem.classList.remove('animated');
elem.removeAttribute('data-bss-hover-animate');
elem.removeAttribute('data-aos');
});
}
document.addEventListener('DOMContentLoaded', function() {
AOS.init();
var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bss-tooltip]'));
var tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) {
return new bootstrap.Tooltip(tooltipTriggerEl);
})
}, false);
<|start_filename|>Dockerfile<|end_filename|>
FROM python:3.7-buster
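# Install nginx, which (per the nginx.default config copied below) fronts the Django app as a reverse proxy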
RUN apt-get update && apt-get install nginx -y --no-install-recommends
COPY nginx.default /etc/nginx/sites-available/default
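# Symlink nginx's access/error logs to stdout/stderr so `docker logs` captures them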
RUN ln -sf /dev/stdout /var/log/nginx/access.log \
&& ln -sf /dev/stderr /var/log/nginx/error.log
RUN mkdir -p /app
RUN mkdir -p /app/pip_cache
WORKDIR /app
COPY requirements.txt start-server.sh /app/
RUN pip3 install -r requirements.txt
COPY . /app/
RUN chown -R www-data:www-data /app
EXPOSE 8020
STOPSIGNAL SIGTERM
CMD ["sh","start-server.sh"]
<|start_filename|>TWT/templates/base/navbar.html<|end_filename|>
{% load socialaccount %}
{% load static %}
<style>
.dropdown-menu {
background-color: #384757;
}
.dropdown-item {
color: #9aaab5;
}
.dropdown-item:hover {
background-color: #384757;
color: white;
transition: all 0.2s ease;
}
.nav-link{
transition: all 0.2s ease;
}
.alert {
position: relative;
z-index: 999; /* temporary fix */
opacity: 1;
visibility: visible;
transform: translateX(0px);
transition: visibility 0s, opacity 250ms, transform 250ms;
}
.alert.hide {
position: relative;
opacity: 0;
visibility: hidden;
transition: visibility 0s 250ms, opacity 250ms, transform 250ms;
}
</style>
<script>
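// Auto-dismiss flash messages: add the 'hide' class to every alert five seconds after the page loads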
var m = document.getElementsByClassName("alert");
setTimeout(function(){
for (var i=0; i<m.length; i++) {
m[i].classList.add('hide');
}
}, 5000);
</script>
<nav
class="navbar navbar-dark navbar-expand-md navigation-clean-button"
style="background: #232d3b"
>
<div class="container">
<a href="{% url 'home:home' %}">
<img
class="rounded-circle"
src="{% static 'images/cropped-Tech-With-TimXL-192x192.png' %}"
style="opacity: 1; width: 66px; height: 66px; border-radius: 60px"
width="66"
height="66"
/></a>
<button
data-bs-toggle="collapse"
class="navbar-toggler"
data-bs-target="#navcol-1"
>
<span class="visually-hidden">Toggle navigation</span
><span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse text-start" id="navcol-1">
<ul class="navbar-nav text-start mx-auto">
<li class="nav-item"><a class="nav-link" href="{% url 'home:home' %}">Home</a></li>
<li class="nav-item"><a class="nav-link" href="{% url 'timathon:Home' %}">Timathon</a></li>
{% if is_staff %}
<li class="nav-item">
<a class="nav-link" href="{% url 'home:unreleased' %}">Unreleased</a>
</li>
{% endif %}
<li class="nav-item dropdown">
<a
class="dropdown-toggle nav-link"
aria-expanded="false"
data-bs-toggle="dropdown"
href="#"
>More</a
>
<div class="dropdown-menu">
{% for page in custom_pages %}
{% if page.public %}
{% if page.only_staff %}
{% if is_staff %}
<a class="dropdown-item" href="{% url 'home:custom view' page.linkName %}">{{ page.page_menu_name }}</a>
{% endif %}
{% else %}
<a class="dropdown-item" href="{% url 'home:custom view' page.linkName %}">{{ page.page_menu_name }}</a>
{% endif %}
{% endif %}
{% endfor %}
</div>
</li>
</ul>
<span class="navbar-text actions">
{% if user.is_authenticated %}
<a href="{% url 'home:logout' %}">
<button
class="btn default-btn"
id="log-in-out-btn"
type="button"
>
<img
class="rounded-circle"
width="32"
height="32"
src="{{ avatar_url }}"
style="margin-right: 5px"
/>Log out
</button>
</a>
{% else %}
<a href="{% provider_login_url 'discord' %}">
<button
class="btn default-btn"
id="log-in-out-btn"
type="button"
><i class="fab fa-discord"
style="transform: scale(1.54); padding: 11px"
></i
>Log in
</button>
</a>
{% endif %}
</span>
</div>
</div>
</nav>
<div class="container">
{% if messages %}
<div class="row">
<div class="col-sm-4 col-sm-offset-3" style="position: absolute; right:0px; top: 100px">
{% for message in messages %}
<div class="alert alert-{{ message.tags }} alert-text alert-dismissible" role="alert">
<button type="button" class="btn-close" aria-label="Close" data-bs-dismiss="alert"></button>
<strong class="alert-text">{% if message.level == DEFAULT_MESSAGE_LEVELS.ERROR %}Error{% else %}
{{ message.tags|title }}{% endif %}!</strong>
<p class="alert-text">{{ message }}</p>
</div>
{% endfor %}
</div>
</div>
{% endif %}
</div>
<|start_filename|>TWT/templates/challenges/unreleased.html<|end_filename|>
{% extends 'base/base.html' %}
{% load socialaccount %}
{% load static %}
{% load martortags %}
{% block title %}Unreleased{% endblock %}
<link href="{% static 'plugins/css/resizable.min.css' %}" type="text/css" media="all" rel="stylesheet" />
<link href="{% static 'plugins/css/ace.min.css' %}" type="text/css" media="all" rel="stylesheet" />
<link href="{% static 'martor/css/martor.bootstrap.min.css' %}" type="text/css" media="all" rel="stylesheet" />
{% block content %}
{% include 'base/navbar.html' %}
<div class="h-100 p-5 bg-light border rounded-3" style="margin-top: 2%; margin-left: 4%; margin-right: 4%">
<h1 class="display-4">Unreleased Challenges</h1>
<ul class="nav nav-pills flex-column flex-sm-row" id="myTab" role="tablist">
<li class="nav-item" role="presentation">
<a class="flex-sm-fill text-sm-center nav-link active" id="unreleased-tab" data-toggle="tab" href="#unreleased" role="tab" aria-controls="unreleased" aria-selected="true">Unreleased</a>
</li>
</ul>
<br>
<div class="tab-content">
<div class="tab-pane fade show active" id="unreleased" role="tabpanel" aria-labelledby="unreleased-tab">
{% for challenge in unreleased_challenges.weekly_challenges %}
{% if forloop.first %}<h5 class="challenge-title">Weekly Challenges</h5><div class="row">{% endif %}
<div class="col-sm-6">
<div class="card">
<div class="card-body">
<h1 class="card-title">{{ challenge.title }}</h1>
<div class="card-text text-muted">{{ challenge.short_desc }}</div>
<hr class="my-4">
<div class="card-text">{{ challenge.description|safe_markdown }}</div>
<hr class="my-4">
<div class="card-text">{{ challenge.rules }}</div>
</div>
<div class="card-footer">
<a href="{# url 'home:view' challenge.id #}#">
<button class="btn btn-primary">Start</button>
</a>
</div>
</div>
</div>
{% if forloop.counter|divisibleby:2 %}</div><div class="row" style="margin-top: 10px;">{% endif %}
{% if forloop.last %}</div>{% endif %}
{% endfor %}
{% if unreleased_challenges.weekly_challenges %}
<br>
<hr>
{% endif %}
{% for challenge in unreleased_challenges.monthly_challenges %}
{% if forloop.first %}<h5 class="challenge-title">Monthly Challenges</h5><div class="row">{% endif %}
<div class="col-sm-6">
<div class="card">
<div class="card-body">
<h1 class="card-title">{{ challenge.title }}</h1>
<div class="card-text text-muted">{{ challenge.short_desc }}</div>
<hr class="my-4">
<div class="card-text">{{ challenge.description|safe_markdown }}</div>
<hr class="my-4">
<div class="card-text">{{ challenge.rules }}</div>
</div>
<div class="card-footer">
<a href="{% url 'home:delete' challenge.id %}">
<button class="btn btn-danger">Delete</button>
</a>
<a href="{% url 'home:start' challenge.id %}">
<button class="btn btn-primary">Start</button>
</a>
</div>
</div>
</div>
{% if forloop.counter|divisibleby:2 %}</div><div class="row" style="margin-top: 10px;">{% endif %}
{% if forloop.last %}</div>{% endif %}
{% endfor %}
</div>
</div>
</div>
{% endblock %} | Tech-With-Tim/old_site |
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/spring/SpringUtils.java<|end_filename|>
/*
* Copyright (C) 2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package net.arin.rdap_bootstrap.spring;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.ApplicationContext;
public abstract class SpringUtils
{
private static ApplicationContext ctx;
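// Returns a no-op BeanPostProcessor solely so that Spring instantiates this bean early,
// capturing the ApplicationContext and pointing AppProperties at its Environment.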
public static BeanPostProcessor createInitBean( ApplicationContext ctx )
{
init( ctx );
AppProperties.updateResolver( ctx.getEnvironment() );
return new BeanPostProcessor()
{
};
}
public static void init( ApplicationContext ctx )
{
if ( ctx == null )
{
throw new IllegalArgumentException( "Application context cannot be null" );
}
else
{
SpringUtils.ctx = ctx;
}
}
}
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/spring/AppProperties.java<|end_filename|>
/*
* Copyright (C) 2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package net.arin.rdap_bootstrap.spring;
import org.springframework.core.env.PropertyResolver;
import org.springframework.core.env.StandardEnvironment;
import java.io.File;
import java.math.BigDecimal;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.stream.Collectors;
public abstract class AppProperties
{
protected AppProperties()
{
}
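// Sentinel left in property templates; getProperty treats a property still set to this value as unset.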
public static final String PLACE_VALUE_HERE = "***PLACE_VALUE_HERE***";
private static final boolean allowForDefaultValues = true;
private static PropertyResolver resolver = new StandardEnvironment();
public static void updateResolver( PropertyResolver resolver )
{
if ( resolver == null )
{
throw new IllegalArgumentException( "Resolver cannot be null" );
}
AppProperties.resolver = resolver;
}
public static String getProperty( String name )
{
return resolver.containsProperty( name ) ? resolver.getProperty( name ) : null;
}
public static String getProperty( String name, String defaultValue )
{
return resolver.containsProperty( name ) ? resolver.getProperty( name ) : defaultValue;
}
public static void bind( String name, Object value )
{
System.setProperty( name, value.toString() );
}
public static void unbind( String name )
{
System.clearProperty( name );
}
public static Integer lookupInteger( String name )
{
return lookup( Integer.class, name );
}
public static Long lookupLong( String name )
{
return lookup( Long.class, name );
}
public static Long lookupLong( String name, long defaultValue )
{
return lookup( Long.class, name, defaultValue );
}
public static BigDecimal lookupBigDecimal( String name )
{
return lookup( BigDecimal.class, name );
}
public static String lookupString( String name )
{
return lookup( String.class, name );
}
public static Boolean lookupBoolean( String name )
{
return lookup( Boolean.class, name );
}
public static Integer lookupInteger( String name, int defaultValue )
{
return lookup( Integer.class, name, defaultValue );
}
public static String lookupString( String name, String defaultValue )
{
return lookup( String.class, name, defaultValue );
}
public static String lookupDirectory( String name )
{
String directory = lookupString( name );
return directory.endsWith( File.separator ) ? directory : directory + File.separator;
}
public static Boolean lookupBoolean( String name, Boolean defaultValue )
{
return lookup( Boolean.class, name, defaultValue );
}
public static <T> T lookup( Class<T> clazz, String name )
{
String value = getProperty( name, false, allowForDefaultValues );
return parseValue( value, clazz, name );
}
public static <T> T lookup( Class<T> clazz, String name, T defaultValue )
{
String value = getProperty( name, true, allowForDefaultValues );
if ( value == null )
{
return defaultValue;
}
return parseValue( value, clazz, name );
}
public static <T> T lookupForceAllowDefault( Class<T> clazz, String name, T defaultValue )
{
String value = getProperty( name, true, true );
if ( value == null )
{
return defaultValue;
}
return parseValue( value, clazz, name );
}
private static String getProperty( String name, boolean nullable, boolean allowForDefaultValues )
{
String value = getProperty( name );
if ( value == null )
{
if ( !nullable )
{
throw new RuntimeException( "System property '" + name + "' not found." );
}
if ( !allowForDefaultValues )
{
throw new RuntimeException( "System property '" + name + "' not found and default values are not allowed." );
}
}
else if ( value.equalsIgnoreCase( PLACE_VALUE_HERE ) )
{
if ( !nullable )
{
throw new RuntimeException( "System property '" + name + "' is '" + PLACE_VALUE_HERE + "' (i.e., not set)" );
}
if ( !allowForDefaultValues )
{
throw new RuntimeException( "System property '" + name + "' is '" + PLACE_VALUE_HERE + "' (i.e., not set) and default values are not allowed." );
}
}
return value;
}
@SuppressWarnings( "unchecked" )
private static <T> T parseValue( String value, Class<T> clazz, String name )
{
if ( clazz == String.class )
{
return ( T ) value;
}
else if ( clazz == Boolean.class )
{
if ( value.equalsIgnoreCase( Boolean.TRUE.toString() ) )
{
return ( T ) Boolean.TRUE;
}
else if ( value.equalsIgnoreCase( Boolean.FALSE.toString() ) )
{
return ( T ) Boolean.FALSE;
}
else
{
throw new RuntimeException( "System property '" + name + "' is not a boolean value" );
}
}
else if ( clazz == Integer.class )
{
return ( T ) Integer.decode( value );
}
else if ( clazz == Long.class )
{
return ( T ) Long.decode( removeEndingL( value ) );
}
else if ( clazz == BigDecimal.class )
{
return ( T ) new BigDecimal( removeEndingL( value ) );
}
else
{
throw new UnsupportedOperationException( "System property of " + clazz + " type is not supported" );
}
}
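// Accept numeric property values written with a Java-style trailing 'L' (e.g. "86400L") by stripping the suffix.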
private static String removeEndingL( String value )
{
String newValue = value.trim();
return newValue.endsWith( "L" ) ? newValue.substring( 0, newValue.length() - 1 ) : newValue;
}
public static SortedMap<String, String> getSystemProperties( String prefix )
{
return System.getProperties().keySet().stream()
.map( Object::toString )
.filter( name -> name.startsWith( prefix ) )
.collect( Collectors.toMap( Function.identity(), AppProperties::getProperty, ( v1, v2 ) -> v1, TreeMap::new ) );
}
}
<|start_filename|>src/test/java/net/arin/rdap_bootstrap/service/TestConstants.java<|end_filename|>
/*
* Copyright (C) 2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
public class TestConstants
{
public final static String AFRINIC_HTTP = "http://rdap.afrinic.net/rdap";
public final static String AFRINIC_HTTPS = "https://rdap.afrinic.net/rdap";
public final static String APNIC_HTTPS = "https://rdap.apnic.net";
public final static String ARIN_HTTP = "http://rdap.arin.net/registry";
public final static String ARIN_HTTPS = "https://rdap.arin.net/registry";
public final static String GOOGLE_HTTPS = "https://www.registry.google/rdap";
public final static String LACNIC_HTTPS = "https://rdap.lacnic.net/rdap";
public final static String RIPE_HTTP = "http://rdap.db.ripe.net";
public final static String RIPE_HTTPS = "https://rdap.db.ripe.net";
public static final String INFO_HTTPS = "https://rdap.afilias.net/rdap/info";
public static final String EXAMPLE_HTTP = "http://example.com";
public static final String EXAMPLE_HTTPS = "https://example.com";
}
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/service/IpV4Bootstrap.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
import java.util.HashMap;
import java.util.Set;
import net.arin.rdap_bootstrap.service.JsonBootstrapFile.ServiceUrls;
import net.arin.rdap_bootstrap.service.ResourceFiles.BootFiles;
import net.ripe.ipresource.IpRange;
import net.ripe.ipresource.UniqueIpResource;
public class IpV4Bootstrap implements JsonBootstrapFile.Handler
{
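// Lookups read the volatile 'allocations' map while reloads build '_allocations' and swap it in
// atomically in endServices(), so readers never observe a partially loaded map.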
private volatile HashMap<String, ServiceUrls> allocations = new HashMap<>();
private HashMap<String, ServiceUrls> _allocations;
private ServiceUrls serviceUrls;
private String publication;
private String description;
public void loadData( ResourceFiles resourceFiles ) throws Exception
{
JsonBootstrapFile bsFile = new JsonBootstrapFile();
bsFile.loadData( resourceFiles.getInputStream( BootFiles.V4.getKey() ), this );
}
@Override
public void startServices()
{
_allocations = new HashMap<>();
}
@Override
public void endServices()
{
allocations = _allocations;
}
@Override
public void startService()
{
serviceUrls = new ServiceUrls();
}
@Override
public void endService()
{
// Nothing to do.
}
@Override
public void addServiceEntry( String entry )
{
_allocations.put( entry, serviceUrls );
}
@Override
public void addServiceUrl( String url )
{
serviceUrls.addUrl( url );
}
public ServiceUrls getServiceUrls( String prefix )
{
UniqueIpResource start;
if ( !prefix.contains( "/" ) && prefix.contains( "." ) )
{
// Single host.
start = UniqueIpResource.parse( prefix );
}
else if ( !prefix.contains( "/" ) )
{
// A bare integer is treated as a /8 network (e.g. "23" becomes 23.0.0.0/8).
try
{
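// Parse purely to validate that the prefix is a bare integer; the parsed value itself is discarded.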
Integer.valueOf( prefix );
start = IpRange.parse( prefix + ".0.0.0/8" ).getStart();
}
catch ( NumberFormatException e )
{
// Network.
start = IpRange.parse( prefix ).getStart();
}
}
else
{
// Network.
start = IpRange.parse( prefix ).getStart();
}
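// Longest-prefix match: keep the most specific bootstrap network that contains the start address.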
ServiceUrls resultUrl = null;
IpRange resultNetwork = IpRange.parse( "0.0.0.0/0" );
final Set<String> keys = allocations.keySet();
for ( String key : keys )
{
final IpRange network = IpRange.parse( key );
if ( network.contains( start ) && ( resultNetwork.getPrefixLength() < network.getPrefixLength() ) )
{
resultNetwork = network;
resultUrl = allocations.get( key );
}
}
return resultUrl;
}
@Override
public void setPublication( String publication )
{
this.publication = publication;
}
public String getPublication()
{
return publication;
}
public String getDescription()
{
return description;
}
@Override
public void setDescription( String description )
{
this.description = description;
}
}
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/service/RedirectServlet.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Map.Entry;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicLong;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.arin.rdap_bootstrap.spring.AppProperties;
import net.arin.rdap_bootstrap.Constants;
import net.arin.rdap_bootstrap.json.Notice;
import net.arin.rdap_bootstrap.json.Response;
import net.arin.rdap_bootstrap.service.DefaultBootstrap.Type;
import net.arin.rdap_bootstrap.service.JsonBootstrapFile.ServiceUrls;
import net.arin.rdap_bootstrap.service.ResourceFiles.BootFiles;
import net.arin.rdap_bootstrap.service.Statistics.UrlHits;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.googlecode.ipv6.IPv6Address;
import com.googlecode.ipv6.IPv6Network;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class RedirectServlet extends HttpServlet
{
private final DefaultBootstrap defaultBootstrap = new DefaultBootstrap();
private final DomainBootstrap domainBootstrap = new DomainBootstrap();
private final IpV6Bootstrap ipV6Bootstrap = new IpV6Bootstrap();
private final IpV4Bootstrap ipV4Bootstrap = new IpV4Bootstrap();
private final AsBootstrap asBootstrap = new AsBootstrap();
private final EntityBootstrap entityBootstrap = new EntityBootstrap();
private volatile Statistics statistics;
private ResourceFiles resourceFiles;
// Defaults for system properties.
Boolean matchSchemeOnRedirect = Boolean.FALSE;
Boolean downloadBootstrapFiles = Boolean.FALSE;
long downloadInterval = 86400; // a day
private static final long CHECK_CONFIG_FILES = 60000L; // every 1 minute
private static final Logger logger = LogManager.getLogger( RedirectServlet.class );
@Override
public void init( ServletConfig config ) throws ServletException
{
super.init( config );
if ( config != null )
{
logProperties();
}
statistics = new Statistics();
matchSchemeOnRedirect = AppProperties.lookupBoolean( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY, matchSchemeOnRedirect );
downloadBootstrapFiles = AppProperties.lookupBoolean( Constants.DOWNLOAD_BOOTSTRAP_FILES_PROPERTY, downloadBootstrapFiles );
if ( downloadBootstrapFiles )
{
try
{
DownloadBootstrapFilesTask downloadBootstrapFilesTask = new DownloadBootstrapFilesTask();
if ( config != null )
{
Timer timer = new Timer();
downloadInterval = AppProperties.lookupLong( Constants.DOWNLOAD_INTERVAL_PROPERTY, downloadInterval );
timer.schedule( downloadBootstrapFilesTask, 0L, downloadInterval * 1000L );
}
// Pause for the download to complete before loading the config.
Thread.sleep( 10000L ); // 10 seconds
}
catch ( Exception e )
{
throw new ServletException( e );
}
}
try
{
LoadConfigTask loadConfigTask = new LoadConfigTask();
// Load data initially.
loadConfigTask.loadData();
logger.info( "Initial data load successful" );
if ( config != null )
{
Timer timer = new Timer();
timer.schedule( loadConfigTask, CHECK_CONFIG_FILES, CHECK_CONFIG_FILES );
}
}
catch ( Exception e )
{
throw new ServletException( e );
}
}
protected void serve( UrlHits urlHits, BaseMaker baseMaker, DefaultBootstrap.Type defaultType, String pathInfo,
HttpServletRequest req, HttpServletResponse resp )
throws IOException
{
try
{
ServiceUrls urls = baseMaker.makeBase( pathInfo );
if ( urls == null && defaultType != null )
{
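// No per-registry match was found; fall back to the configured default service for this query type.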
urls = defaultBootstrap.getServiceUrls( defaultType );
urlHits = UrlHits.DEFAULTHITS;
}
if ( urls == null )
{
resp.sendError( HttpServletResponse.SC_NOT_FOUND );
logger.info( pathInfo + " " + resp.getStatus() );
statistics.getTotalMisses().incrementAndGet();
}
else
{
String redirectUrl = getRedirectUrl( req.getScheme(), req.getPathInfo(), urls );
if ( urlHits != null )
{
urlHits.hit( redirectUrl );
}
statistics.getTotalHits().incrementAndGet();
resp.sendRedirect( redirectUrl );
logger.info( pathInfo + " " + resp.getStatus() + " " + redirectUrl );
}
}
catch ( Exception e )
{
resp.sendError( HttpServletResponse.SC_BAD_REQUEST, e.getMessage() );
logger.info( pathInfo + " " + resp.getStatus() );
}
}
String getRedirectUrl( String scheme, String pathInfo, ServiceUrls urls )
{
String redirectUrl;
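// With scheme matching enabled, prefer a target URL whose scheme matches the incoming request;
// otherwise always prefer the HTTPS URL and fall back to HTTP.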
if ( matchSchemeOnRedirect )
{
if ( scheme.equals( "https" ) && urls.getHttpsUrl() != null )
{
redirectUrl = urls.getHttpsUrl() + pathInfo;
}
else if ( scheme.equals( "http" ) && urls.getHttpUrl() != null )
{
redirectUrl = urls.getHttpUrl() + pathInfo;
}
else
{
redirectUrl = urls.getUrls().get( 0 ) + pathInfo;
}
}
else
{
redirectUrl = urls.getHttpsUrl();
if ( redirectUrl == null )
{
redirectUrl = urls.getHttpUrl();
}
if ( redirectUrl != null )
{
redirectUrl += pathInfo;
}
}
return redirectUrl;
}
@Override
protected void service( HttpServletRequest req, HttpServletResponse resp )
throws IOException
{
if ( req == null )
{
resp.sendError( HttpServletResponse.SC_BAD_REQUEST );
}
else if ( req.getPathInfo() == null )
{
resp.sendError( HttpServletResponse.SC_BAD_REQUEST );
}
else
{
String pathInfo = req.getPathInfo();
if ( pathInfo.startsWith( "/domain/" ) )
{
serve( UrlHits.DOMAINHITS, new MakeDomainBase(), Type.DOMAIN, pathInfo, req, resp );
}
// The /nameserver path leverages the domain bootstrap logic to provide redirection for the nameserver
// queries.
else if ( pathInfo.startsWith( "/nameserver/" ) )
{
serve( UrlHits.NAMESERVERHITS, new MakeNameserverBase(), Type.NAMESERVER, pathInfo, req, resp );
}
else if ( pathInfo.startsWith( "/ip/" ) )
{
serve( UrlHits.IPHITS, new MakeIpBase(), Type.IP, pathInfo, req, resp );
}
else if ( pathInfo.startsWith( "/autnum/" ) )
{
serve( UrlHits.ASHITS, new MakeAutnumBase(), Type.AUTNUM, pathInfo, req, resp );
}
// The /entity path provides redirection for the RIR entity queries.
else if ( pathInfo.startsWith( "/entity/" ) )
{
serve( UrlHits.ENTITYHITS, new MakeEntityBase(), Type.ENTITY, pathInfo, req, resp );
}
// The /help path returns statistics for ARIN's RDAP Bootstrap service.
else if ( pathInfo.startsWith( "/help" ) )
{
resp.setContentType( "application/rdap+json" );
makeHelp( resp.getOutputStream() );
logger.info( pathInfo + " " + resp.getStatus() );
}
else
{
resp.sendError( HttpServletResponse.SC_NOT_FOUND );
logger.info( pathInfo + " " + resp.getStatus() );
}
}
}
public interface BaseMaker
{
ServiceUrls makeBase( String pathInfo );
}
// Domain names.
public ServiceUrls makeDomainBase( String pathInfo )
{
return new MakeDomainBase().makeBase( pathInfo );
}
public class MakeDomainBase implements BaseMaker
{
public ServiceUrls makeBase( String pathInfo )
{
// Strip leading "/domain/".
pathInfo = pathInfo.substring( 8 );
// Strip possible trailing period.
if ( pathInfo.endsWith( "." ) )
{
pathInfo = pathInfo.substring( 0, pathInfo.length() - 1 );
}
if ( pathInfo.endsWith( ".in-addr.arpa" ) )
{
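// Convert the reverse DNS name into a CIDR prefix by reversing the octet labels,
// e.g. "0.0.0.7.in-addr.arpa" becomes "7.0.0.0/32" and "7.in-addr.arpa" becomes "7.0.0.0/8".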
final int bitsPerWord = 8;
final int divisor = 1;
final String delimiter = ".";
String[] words = new String[4];
Arrays.fill( words, "0" );
final String[] _split = pathInfo.split( "\\." );
int n = _split.length - 2;
StringBuilder s = new StringBuilder();
StringBuilder _s = new StringBuilder();
for ( int i = n - 1, j = 1; i >= 0; i--, j++ )
{
_s.append( _split[i] );
words[j / divisor - 1] = _s.toString();
_s = new StringBuilder();
}
// Get the CIDR string (prefix slash prefix length) to query the IPv4 bootstrap.
for ( int i = 0; i < words.length - 1; i++ )
{
s.append( words[i] ).append( delimiter );
}
s.append( words[words.length - 1] );
s.append( "/" ).append( bitsPerWord * n );
return ipV4Bootstrap.getServiceUrls( s.toString() );
}
else if ( pathInfo.endsWith( ".ip6.arpa" ) )
{
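// Pack the reversed nibble labels (most significant first) two per byte into a 16-byte
// address, e.g. "0.2.6.2.ip6.arpa" maps to 2620::. Note that labels[0], the least
// significant nibble, is not consumed; only the high-order bits matter for the bootstrap lookup.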
String[] labels = pathInfo.split( "\\." );
byte[] bytes = new byte[16];
Arrays.fill( bytes, ( byte ) 0 );
int labelIdx = labels.length - 3;
int byteIdx = 0;
int idxJump = 1;
while ( labelIdx > 0 )
{
char ch = labels[labelIdx].charAt( 0 );
byte value = 0;
if ( ch >= '0' && ch <= '9' )
{
value = ( byte ) ( ch - '0' );
}
else if ( ch >= 'A' && ch <= 'F' )
{
value = ( byte ) ( ch - ( 'A' - 0xaL ) );
}
else if ( ch >= 'a' && ch <= 'f' )
{
value = ( byte ) ( ch - ( 'a' - 0xaL ) );
}
if ( idxJump % 2 == 1 )
{
bytes[byteIdx] = ( byte ) ( value << 4 );
}
else
{
bytes[byteIdx] = ( byte ) ( bytes[byteIdx] + value );
}
labelIdx--;
idxJump++;
if ( idxJump % 2 == 1 )
{
byteIdx++;
}
}
return ipV6Bootstrap.getServiceUrls( IPv6Address.fromByteArray( bytes ) );
}
// else a forward domain
String[] labels = pathInfo.split( "\\." );
return domainBootstrap.getServiceUrls( labels[labels.length - 1] );
}
}
// Nameservers. Only for forward domains.
public ServiceUrls makeNameserverBase( String pathInfo )
{
return new MakeNameserverBase().makeBase( pathInfo );
}
public class MakeNameserverBase implements BaseMaker
{
public ServiceUrls makeBase( String pathInfo )
{
// Strip leading "/nameserver/".
pathInfo = pathInfo.substring( 12 );
// Strip possible trailing period.
if ( pathInfo.endsWith( "." ) )
{
pathInfo = pathInfo.substring( 0, pathInfo.length() - 1 );
}
String[] labels = pathInfo.split( "\\." );
return domainBootstrap.getServiceUrls( labels[labels.length - 1] );
}
}
// IP addresses.
public ServiceUrls makeIpBase( String pathInfo )
{
return new MakeIpBase().makeBase( pathInfo );
}
public class MakeIpBase implements BaseMaker
{
public ServiceUrls makeBase( String pathInfo )
{
// Strip leading "/ip/".
pathInfo = pathInfo.substring( 4 );
if ( !pathInfo.contains( ":" ) ) // is not IPv6
{
return ipV4Bootstrap.getServiceUrls( pathInfo );
}
// else
IPv6Address addr;
if ( !pathInfo.contains( "/" ) )
{
addr = IPv6Address.fromString( pathInfo );
return ipV6Bootstrap.getServiceUrls( addr );
}
else
{
IPv6Network net = IPv6Network.fromString( pathInfo );
return ipV6Bootstrap.getServiceUrls( net );
}
}
}
// AS numbers.
public ServiceUrls makeAutnumBase( String pathInfo )
{
return new MakeAutnumBase().makeBase( pathInfo );
}
public class MakeAutnumBase implements BaseMaker
{
public ServiceUrls makeBase( String pathInfo )
{
return asBootstrap.getServiceUrls( pathInfo.split( "/" )[2] );
}
}
// Entities.
public ServiceUrls makeEntityBase( String pathInfo )
{
return new MakeEntityBase().makeBase( pathInfo );
}
public class MakeEntityBase implements BaseMaker
{
public ServiceUrls makeBase( String pathInfo )
{
int i = pathInfo.lastIndexOf( '-' );
if ( i != -1 && i + 1 < pathInfo.length() )
{
// Use the RIR label in the entity handle to get the redirection URL.
return entityBootstrap.getServiceUrls( pathInfo.substring( i + 1 ) );
}
// else
return null;
}
}
// Statistics.
private Notice makeStatsNotice( Statistics.UrlHits stats )
{
Notice notice = new Notice();
notice.setTitle( stats.getTitle() );
ArrayList<String> description = new ArrayList<>();
Set<Entry<String, AtomicLong>> entrySet = stats.getEntrySet();
if ( entrySet.size() != 0 )
{
for ( Entry<String, AtomicLong> entry : entrySet )
{
description.add( String.format( "%-5d = %25s", entry.getValue().get(), entry.getKey() ) );
}
}
else
{
description.add( "Zero queries." );
}
notice.setDescription( description.toArray( new String[0] ) );
return notice;
}
public void makeHelp( OutputStream outputStream ) throws IOException
{
Response response = new Response( null );
ArrayList<Notice> notices = new ArrayList<>();
// Do statistics.
for ( Statistics.UrlHits stats : Statistics.UrlHits.values() )
{
notices.add( makeStatsNotice( stats ) );
}
// Totals.
Notice notice = new Notice();
notice.setTitle( "Totals" );
String[] description = new String[2];
description[0] = String.format( "Hits = %5d", statistics.getTotalHits().get() );
description[1] = String.format( "Misses = %5d", statistics.getTotalMisses().get() );
notice.setDescription( description );
notices.add( notice );
// Modified dates for the various bootstrap files, reported alongside their publication dates.
notices.add( createPublicationDateNotice( "Default",
resourceFiles.getLastModified( BootFiles.DEFAULT.getKey() ),
defaultBootstrap.getPublication() ) );
notices.add( createPublicationDateNotice( "Domain",
resourceFiles.getLastModified( BootFiles.DOMAIN.getKey() ),
domainBootstrap.getPublication() ) );
notices.add( createPublicationDateNotice( "IPv4",
resourceFiles.getLastModified( BootFiles.V4.getKey() ),
ipV4Bootstrap.getPublication() ) );
notices.add( createPublicationDateNotice( "IPv6",
resourceFiles.getLastModified( BootFiles.V6.getKey() ),
ipV6Bootstrap.getPublication() ) );
notices.add( createPublicationDateNotice( "AS",
resourceFiles.getLastModified( BootFiles.AS.getKey() ),
asBootstrap.getPublication() ) );
notices.add( createPublicationDateNotice( "Entity",
resourceFiles.getLastModified( BootFiles.ENTITY.getKey() ),
entityBootstrap.getPublication() ) );
response.setNotices( notices );
ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion( Include.NON_EMPTY );
ObjectWriter writer = mapper.writer( new DefaultPrettyPrinter() );
writer.writeValue( outputStream, response );
}
private Notice createPublicationDateNotice( String file, long lastModified,
String publicationDate )
{
Notice bootFileModifiedNotice = new Notice();
bootFileModifiedNotice.setTitle( String.format( "%s Bootstrap File Modified and Published Dates", file ) );
String[] bootFileModifiedDescription = new String[2];
// Date format as 2015-05-15T17:04:06-0500 (Y-m-d'T'H:M:Sz).
bootFileModifiedDescription[0] = String.format( "%1$tFT%1$tT%1$tz", lastModified );
bootFileModifiedDescription[1] = publicationDate;
bootFileModifiedNotice.setDescription( bootFileModifiedDescription );
return bootFileModifiedNotice;
}
private class LoadConfigTask extends TimerTask
{
private boolean isModified( long currentTime, long lastModified )
{
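// True when the file changed within the last polling window.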
return ( currentTime - CHECK_CONFIG_FILES ) < lastModified;
}
@Override
public void run()
{
boolean load = false;
long currentTime = System.currentTimeMillis();
for ( BootFiles bootFiles : BootFiles.values() )
{
if ( isModified( currentTime, resourceFiles.getLastModified( bootFiles.getKey() ) ) )
{
logger.info( String.format( "%s was last modified at %s", bootFiles.getKey(),
new Date( resourceFiles.getLastModified( bootFiles.getKey() ) ) ) );
load = true;
}
}
if ( load )
{
try
{
loadData();
}
catch ( Exception e )
{
logger.warn( "Problem loading data", e );
}
}
}
public void loadData() throws Exception
{
logger.info( "Loading resource files" );
resourceFiles = new ResourceFiles();
defaultBootstrap.loadData( resourceFiles );
domainBootstrap.loadData( resourceFiles );
ipV4Bootstrap.loadData( resourceFiles );
ipV6Bootstrap.loadData( resourceFiles );
asBootstrap.loadData( resourceFiles );
entityBootstrap.loadData( resourceFiles );
}
}
private static class DownloadBootstrapFilesTask extends TimerTask
{
@Override
public void run()
{
try
{
logger.info( "Downloading files from IANA RDAP Bootstrap registry" );
String downloadDir = AppProperties.getProperty( Constants.DOWNLOAD_DIRECTORY_PROPERTY );
if ( downloadDir == null )
{
throw new IOException( "Specify download directory" );
}
Path downloadDirPath = Paths.get( downloadDir );
if ( !downloadDirPath.isAbsolute() )
{
throw new IOException( "Specify absolute path of the download directory: " + downloadDir );
}
Files.createDirectories( downloadDirPath );
downloadFileSafely( AppProperties.getProperty( Constants.DOWNLOAD_ASN_FILE_URL_PROPERTY ), downloadDir );
downloadFileSafely( AppProperties.getProperty( Constants.DOWNLOAD_DOMAIN_FILE_URL_PROPERTY ), downloadDir );
downloadFileSafely( AppProperties.getProperty( Constants.DOWNLOAD_IPV4_FILE_URL_PROPERTY ), downloadDir );
downloadFileSafely( AppProperties.getProperty( Constants.DOWNLOAD_IPV6_FILE_URL_PROPERTY ), downloadDir );
}
catch ( IOException e )
{
logger.warn( "Problem downloading files from IANA RDAP Bootstrap registry", e );
}
}
private void downloadFileSafely( String downloadUrlStr, String downloadDir )
throws IOException
{
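// Download to <file>.new, rotate the previous copy from .cur to .old (leaving the symlink
// pointing at .old during the swap), then promote .new to .cur and repoint the symlink at it.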
logger.info( "Downloading " + downloadUrlStr );
URL downloadUrl = new URL( downloadUrlStr );
String fileName = FilenameUtils.getName( downloadUrl.getPath() );
Path filePath = Paths.get( downloadDir + "/" + fileName );
String newFilePathname = downloadDir + "/" + fileName + ".new";
Path newFilePath = Paths.get( newFilePathname );
Path curFilePath = Paths.get( downloadDir + "/" + fileName + ".cur" );
Path oldFilePath = Paths.get( downloadDir + "/" + fileName + ".old" );
FileUtils.copyURLToFile( downloadUrl, new File( newFilePathname ), 5000, 5000 ); // 5-second connection and read timeouts
Files.deleteIfExists( oldFilePath );
if ( Files.exists( curFilePath ) )
{
Files.copy( curFilePath, oldFilePath, StandardCopyOption.REPLACE_EXISTING );
Files.deleteIfExists( filePath );
Files.createSymbolicLink( filePath, oldFilePath );
}
Files.copy( newFilePath, curFilePath, StandardCopyOption.REPLACE_EXISTING );
Files.deleteIfExists( filePath );
Files.createSymbolicLink( filePath, curFilePath );
}
}
private void logProperties()
{
logger.info( "RDAP Bootstrap server properties: " );
logger.info( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY + "=" +
AppProperties.lookupBoolean( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY, matchSchemeOnRedirect ) );
logger.info( Constants.DOWNLOAD_BOOTSTRAP_FILES_PROPERTY + "=" +
AppProperties.lookupBoolean( Constants.DOWNLOAD_BOOTSTRAP_FILES_PROPERTY, downloadBootstrapFiles ) );
logger.info( Constants.DOWNLOAD_ASN_FILE_URL_PROPERTY + "=" +
AppProperties.getProperty( Constants.DOWNLOAD_ASN_FILE_URL_PROPERTY ) );
logger.info( Constants.DOWNLOAD_DOMAIN_FILE_URL_PROPERTY + "=" +
AppProperties.getProperty( Constants.DOWNLOAD_DOMAIN_FILE_URL_PROPERTY ) );
logger.info( Constants.DOWNLOAD_IPV4_FILE_URL_PROPERTY + "=" +
AppProperties.getProperty( Constants.DOWNLOAD_IPV4_FILE_URL_PROPERTY ) );
logger.info( Constants.DOWNLOAD_IPV6_FILE_URL_PROPERTY + "=" +
AppProperties.getProperty( Constants.DOWNLOAD_IPV6_FILE_URL_PROPERTY ) );
logger.info( Constants.DOWNLOAD_DIRECTORY_PROPERTY + "=" +
AppProperties.getProperty( Constants.DOWNLOAD_DIRECTORY_PROPERTY ) );
logger.info( Constants.DOWNLOAD_INTERVAL_PROPERTY + "=" +
AppProperties.lookupLong( Constants.DOWNLOAD_INTERVAL_PROPERTY, downloadInterval ) );
for ( BootFiles bootFiles : BootFiles.values() )
{
String property = Constants.PROPERTY_PREFIX + "bootfile." + bootFiles.getKey();
logger.info( property + "=" + AppProperties.getProperty( property ) );
}
}
}
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/service/Statistics.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package net.arin.rdap_bootstrap.service;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
public class Statistics
{
private static class LruMap<K, V> extends LinkedHashMap<K, V>
{
private final int maxEntries;
public LruMap( int maxEntries )
{
this.maxEntries = maxEntries;
}
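// Caps the map at maxEntries by evicting the eldest entry; eviction follows insertion
// order, since the access-order LinkedHashMap constructor is not used.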
@Override
protected boolean removeEldestEntry( Entry<K, V> entry )
{
return super.size() > maxEntries;
}
}
public enum UrlHits
{
DEFAULTHITS( "Default Hits" ),
DOMAINHITS( "Domain Hits" ),
NAMESERVERHITS( "Nameserver Hits" ),
IPHITS( "IP Hits" ),
ASHITS( "Autnum Hits" ),
ENTITYHITS( "Entity Hits" );
private final Map<String, AtomicLong> hitsMap = Collections.synchronizedMap( new LruMap<>( 100 ) );
private final String title;
public void hit( String url )
{
AtomicLong counter = hitsMap.get( url );
if ( counter == null )
{
hitsMap.put( url, new AtomicLong( 1 ) );
}
else
{
counter.incrementAndGet();
}
}
public Set<Entry<String, AtomicLong>> getEntrySet()
{
return hitsMap.entrySet();
}
public String getTitle()
{
return title;
}
UrlHits( String title )
{
this.title = title;
}
}
private final AtomicLong totalHits = new AtomicLong( 0 );
private final AtomicLong totalMisses = new AtomicLong( 0 );
public AtomicLong getTotalHits()
{
return totalHits;
}
public AtomicLong getTotalMisses()
{
return totalMisses;
}
}
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/service/IpV6Bootstrap.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
import java.util.Map;
import java.util.TreeMap;
import net.arin.rdap_bootstrap.service.JsonBootstrapFile.ServiceUrls;
import net.arin.rdap_bootstrap.service.ResourceFiles.BootFiles;
import com.googlecode.ipv6.IPv6Address;
import com.googlecode.ipv6.IPv6Network;
public class IpV6Bootstrap implements JsonBootstrapFile.Handler
{
private static class HighBitsRangeInfo
{
private final Long highBitsStart;
private final Long highBitsEnd;
private final ServiceUrls serviceUrls;
public HighBitsRangeInfo( Long highBitsStart, Long highBitsEnd, ServiceUrls serviceUrls )
{
this.highBitsStart = highBitsStart;
this.highBitsEnd = highBitsEnd;
this.serviceUrls = serviceUrls;
}
public Long getHighBitsStart()
{
return highBitsStart;
}
public Long getHighBitsEnd()
{
return highBitsEnd;
}
public ServiceUrls getServiceUrls()
{
return serviceUrls;
}
}
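// Lookups read the volatile map while reloads build into _allocations and swap it in
// atomically via endServices().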
private volatile TreeMap<Long, HighBitsRangeInfo> allocations = new TreeMap<>();
private TreeMap<Long, HighBitsRangeInfo> _allocations;
private ServiceUrls serviceUrls;
private String publication;
private String description;
@Override
public void startServices()
{
_allocations = new TreeMap<>();
}
@Override
public void endServices()
{
allocations = _allocations;
}
@Override
public void startService()
{
serviceUrls = new ServiceUrls();
}
@Override
public void endService()
{
// Nothing to do.
}
@Override
public void addServiceEntry( String entry )
{
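// Key each allocation by the high 64 bits of the network's first address; the range end
// covers every high-bits value inside the prefix (the arithmetic assumes prefixes no
// longer than /64, as is the case for registry-level allocations).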
IPv6Network v6net = IPv6Network.fromString( entry );
long key = v6net.getFirst().getHighBits();
int prefixLength = v6net.getNetmask().asPrefixLength();
_allocations.put( key, new HighBitsRangeInfo( key, key + ( long ) ( Math.pow( 2, 64 - prefixLength ) - 1 ), serviceUrls ) );
}
@Override
public void addServiceUrl( String url )
{
serviceUrls.addUrl( url );
}
public void loadData( ResourceFiles resourceFiles )
throws Exception
{
JsonBootstrapFile bsFile = new JsonBootstrapFile();
bsFile.loadData( resourceFiles.getInputStream( BootFiles.V6.getKey() ), this );
}
public ServiceUrls getServiceUrls( long prefixStart, long prefixEnd )
{
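// floorEntry finds the allocation starting at or below prefixStart; it only matches if
// the queried range is fully contained within that allocation.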
ServiceUrls retval = null;
Map.Entry<Long, HighBitsRangeInfo> entry = allocations.floorEntry( prefixStart );
if ( entry != null )
{
HighBitsRangeInfo highBitsRangeInfo = entry.getValue();
if ( highBitsRangeInfo.getHighBitsStart() <= prefixStart && prefixEnd <= highBitsRangeInfo.getHighBitsEnd() )
{
retval = highBitsRangeInfo.getServiceUrls();
}
}
return retval;
}
public ServiceUrls getServiceUrls( IPv6Address addr )
{
return getServiceUrls( addr.getHighBits(), addr.getHighBits() );
}
public ServiceUrls getServiceUrls( IPv6Network net )
{
long prefixStart = net.getFirst().getHighBits();
int prefixLength = net.getNetmask().asPrefixLength();
long prefixEnd = prefixStart + ( long ) ( Math.pow( 2, 64 - prefixLength ) - 1 );
return getServiceUrls( prefixStart, prefixEnd );
}
@Override
public void setPublication( String publication )
{
this.publication = publication;
}
public String getPublication()
{
return publication;
}
public String getDescription()
{
return description;
}
@Override
public void setDescription( String description )
{
this.description = description;
}
}
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/Constants.java<|end_filename|>
/*
* Copyright (C) 2015-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package net.arin.rdap_bootstrap;
/**
* Holds app-wide constants.
*/
public class Constants
{
public final static String PROPERTY_PREFIX = "arin.rdapbootstrap.";
public final static String MATCH_SCHEME_ON_REDIRECT_PROPERTY = "arin.rdapbootstrap.match_scheme_on_redirect";
public final static String DOWNLOAD_BOOTSTRAP_FILES_PROPERTY = "arin.rdapbootstrap.download_bootstrap_files";
public final static String DOWNLOAD_ASN_FILE_URL_PROPERTY = "arin.rdapbootstrap.download_asn_file_url";
public final static String DOWNLOAD_DOMAIN_FILE_URL_PROPERTY = "arin.rdapbootstrap.download_domain_file_url";
public final static String DOWNLOAD_IPV4_FILE_URL_PROPERTY = "arin.rdapbootstrap.download_ipv4_file_url";
public final static String DOWNLOAD_IPV6_FILE_URL_PROPERTY = "arin.rdapbootstrap.download_ipv6_file_url";
public final static String DOWNLOAD_DIRECTORY_PROPERTY = "arin.rdapbootstrap.download_directory";
public final static String DOWNLOAD_INTERVAL_PROPERTY = "arin.rdapbootstrap.download_interval";
}
<|start_filename|>src/test/java/net/arin/rdap_bootstrap/service/IpV6BootstrapTest.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
import com.googlecode.ipv6.IPv6Address;
import com.googlecode.ipv6.IPv6Network;
import org.junit.Test;
import static junit.framework.Assert.assertEquals;
import static net.arin.rdap_bootstrap.service.TestConstants.AFRINIC_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.APNIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.ARIN_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.LACNIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.RIPE_HTTPS;
import static org.junit.Assert.assertNull;
public class IpV6BootstrapTest
{
@Test
public void testAllocations() throws Exception
{
IpV6Bootstrap v6 = new IpV6Bootstrap();
v6.loadData( new ResourceFiles() );
assertEquals( AFRINIC_HTTP, v6.getServiceUrls( IPv6Network.fromString( "2c00:0000::/12" ) ).getHttpUrl() );
assertEquals( AFRINIC_HTTP, v6.getServiceUrls( IPv6Network.fromString( "2c00:0000::/13" ) ).getHttpUrl() );
assertNull( v6.getServiceUrls( IPv6Network.fromString( "3c00:0000::/12" ) ) );
assertEquals( APNIC_HTTPS, v6.getServiceUrls( IPv6Network.fromString( "2001:0200::/23" ) ).getHttpsUrl() );
assertEquals( ARIN_HTTP, v6.getServiceUrls( IPv6Address.fromString( "fc00:db20:35b:7399::5" ) ).getHttpUrl() );
assertEquals( ARIN_HTTP, v6.getServiceUrls( IPv6Address.fromString( "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b" ) ).getHttpUrl() );
assertEquals( ARIN_HTTP, v6.getServiceUrls( IPv6Address.fromString( "fdf8:f53e:61e4::18" ) ).getHttpUrl() );
assertEquals( ARIN_HTTP, v6.getServiceUrls( IPv6Address.fromString( "fc00:db20:35b:7399::5" ) ).getHttpUrl() );
assertEquals( LACNIC_HTTPS, v6.getServiceUrls( IPv6Address.fromString( "fc00:e968:6179::de52:7100" ) ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, v6.getServiceUrls( IPv6Address.fromString( "fdf8:f53e:61e4::18" ) ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, v6.getServiceUrls( IPv6Address.fromString( "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b" ) ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, v6.getServiceUrls( IPv6Address.fromString( "280f:ffff:ffff:ffff:ffff:ffff:ffff:ffff" ) ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, v6.getServiceUrls( IPv6Network.fromString( "2800:0000::/12" ) ).getHttpsUrl() );
assertEquals( RIPE_HTTPS, v6.getServiceUrls( IPv6Address.fromString( "fdf8:f53e:61e4::18" ) ).getHttpsUrl() );
assertEquals( RIPE_HTTPS, v6.getServiceUrls( IPv6Address.fromString( "2fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b" ) ).getHttpsUrl() );
}
}
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/service/ResourceFiles.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package net.arin.rdap_bootstrap.service;
import net.arin.rdap_bootstrap.spring.AppProperties;
import net.arin.rdap_bootstrap.Constants;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.Properties;
/**
* Manages getting resource files.
*/
public class ResourceFiles
{
public enum BootFiles
{
DEFAULT( "default_bootstrap" ),
DOMAIN( "domain_bootstrap" ),
V4( "v4_bootstrap" ),
V6( "v6_bootstrap" ),
AS( "as_bootstrap" ),
ENTITY( "entity_bootstrap" );
private final String key;
public String getKey()
{
return key;
}
BootFiles( String key )
{
this.key = key;
}
}
private final Properties resourceFiles;
private final HashMap<String, Boolean> isFile;
private static final Logger logger = LogManager.getLogger( ResourceFiles.class );
public ResourceFiles() throws IOException
{
String extFileName = AppProperties.getProperty( Constants.PROPERTY_PREFIX + "resource_files" );
resourceFiles = new Properties();
File file;
if ( extFileName == null )
{
InputStream inputStream = getClass().getResourceAsStream( "/resource_files.properties" );
resourceFiles.load( inputStream );
}
else if ( ( file = new File( extFileName ) ).isFile() )
{
InputStream inputStream = new FileInputStream( file );
resourceFiles.load( inputStream );
}
// Override with explicitly set system properties.
Boolean downloadBootstrapFiles = AppProperties.lookupBoolean( Constants.DOWNLOAD_BOOTSTRAP_FILES_PROPERTY, false );
String downloadDir = AppProperties.getProperty( Constants.DOWNLOAD_DIRECTORY_PROPERTY );
for ( BootFiles bootFiles : BootFiles.values() )
{
String bootfilePropertyName = Constants.PROPERTY_PREFIX + "bootfile." + bootFiles.key;
if ( downloadBootstrapFiles && StringUtils.isNotBlank( downloadDir ) )
{
if ( bootfilePropertyName.contains( BootFiles.AS.key ) )
{
setBootfileProperty( bootfilePropertyName, Constants.DOWNLOAD_ASN_FILE_URL_PROPERTY, downloadDir );
}
else if ( bootfilePropertyName.contains( BootFiles.DOMAIN.key ) )
{
setBootfileProperty( bootfilePropertyName, Constants.DOWNLOAD_DOMAIN_FILE_URL_PROPERTY, downloadDir );
}
else if ( bootfilePropertyName.contains( BootFiles.V4.key ) )
{
setBootfileProperty( bootfilePropertyName, Constants.DOWNLOAD_IPV4_FILE_URL_PROPERTY, downloadDir );
}
else if ( bootfilePropertyName.contains( BootFiles.V6.key ) )
{
setBootfileProperty( bootfilePropertyName, Constants.DOWNLOAD_IPV6_FILE_URL_PROPERTY, downloadDir );
}
}
String value = AppProperties.getProperty( bootfilePropertyName );
if ( value != null && value.length() > 0 )
{
resourceFiles.put( bootFiles.key, value );
}
}
isFile = new HashMap<>();
for ( Entry<Object, Object> entry : resourceFiles.entrySet() )
{
file = new File( entry.getValue().toString() );
isFile.put( entry.getKey().toString(), file.exists() );
}
}
private void setBootfileProperty( String bootfilePropertyName, String downloadUrlPropertyName, String downloadDir )
throws IOException
{
String downloadUrlStr = AppProperties.getProperty( downloadUrlPropertyName );
if ( StringUtils.isNotBlank( downloadUrlStr ) )
{
URL downloadUrl = new URL( downloadUrlStr );
String fileName = FilenameUtils.getName( downloadUrl.getPath() );
Path filePath = Paths.get( downloadDir + "/" + fileName );
System.setProperty( bootfilePropertyName, filePath.toString() );
logger.debug( "Set " + bootfilePropertyName + "=" + AppProperties.getProperty( bootfilePropertyName ) );
}
}
public InputStream getInputStream( String key ) throws FileNotFoundException
{
logger.debug( "Load data from " + resourceFiles.getProperty( key ) );
if ( isFile.get( key ) )
{
File file = new File( resourceFiles.getProperty( key ) );
return new FileInputStream( file );
}
// else
return getClass().getResourceAsStream( resourceFiles.getProperty( key ) );
}
public long getLastModified( String key )
{
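// Classpath resources have no file timestamp, so report the current time; note this makes
// them always appear freshly modified to the reload check.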
if ( !isFile.get( key ) )
{
return System.currentTimeMillis();
}
// else
File file = new File( resourceFiles.getProperty( key ) );
return file.lastModified();
}
}
<|start_filename|>src/main/java/net/arin/rdap_bootstrap/spring/RdapBootstrapApp.java<|end_filename|>
/*
* Copyright (C) 2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package net.arin.rdap_bootstrap.spring;
import javax.servlet.Servlet;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.PropertySource;
@PropertySource( { "classpath:rdap_bootstrap.properties" } )
@SpringBootApplication
public class RdapBootstrapApp
{
public static void main( String[] args )
{
SpringApplication.run( RdapBootstrapApp.class, args );
}
@Bean
public static BeanPostProcessor postProcessor( ConfigurableApplicationContext ctx )
{
return SpringUtils.createInitBean( ctx );
}
@Bean
public static ServletRegistrationBean<Servlet> rdapBootstrapRedirectServlet() throws Exception
{
ServletRegistrationBean<Servlet> registrationBean = new ServletRegistrationBean<>();
registrationBean.setServlet( ( Servlet ) Class.forName( "net.arin.rdap_bootstrap.service.RedirectServlet" ).getConstructor().newInstance() );
registrationBean.addUrlMappings( "/rdapbootstrap/*" );
registrationBean.setLoadOnStartup( 1 );
return registrationBean;
}
}
<|start_filename|>src/test/java/net/arin/rdap_bootstrap/service/AsBootstrapTest.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
import org.junit.Test;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNull;
import static net.arin.rdap_bootstrap.service.TestConstants.AFRINIC_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.AFRINIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.APNIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.ARIN_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.ARIN_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.LACNIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.RIPE_HTTPS;
public class AsBootstrapTest
{
@Test
public void testBootstrap() throws Exception
{
AsBootstrap asBootstrap = new AsBootstrap();
asBootstrap.loadData( new ResourceFiles() );
assertEquals( AFRINIC_HTTP, asBootstrap.getServiceUrls( "36864" ).getHttpUrl() );
assertEquals( AFRINIC_HTTPS, asBootstrap.getServiceUrls( "329727" ).getHttpsUrl() );
assertNull( asBootstrap.getServiceUrls( "4608" ).getHttpUrl() );
assertEquals( APNIC_HTTPS, asBootstrap.getServiceUrls( "4608" ).getHttpsUrl() );
assertEquals( APNIC_HTTPS, asBootstrap.getServiceUrls( "140603" ).getHttpsUrl() );
assertEquals( ARIN_HTTP, asBootstrap.getServiceUrls( "1" ).getHttpUrl() );
assertEquals( ARIN_HTTPS, asBootstrap.getServiceUrls( "399259" ).getHttpsUrl() );
assertNull( asBootstrap.getServiceUrls( "27648" ).getHttpUrl() );
assertEquals( LACNIC_HTTPS, asBootstrap.getServiceUrls( "27648" ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, asBootstrap.getServiceUrls( "271774" ).getHttpsUrl() );
assertNull( asBootstrap.getServiceUrls( "1877" ).getHttpUrl() );
assertEquals( RIPE_HTTPS, asBootstrap.getServiceUrls( "1877" ).getHttpsUrl() );
assertEquals( RIPE_HTTPS, asBootstrap.getServiceUrls( "213403" ).getHttpsUrl() );
assertNull( asBootstrap.getServiceUrls( "4294967294" ) );
}
}
<|start_filename|>src/test/java/net/arin/rdap_bootstrap/service/RedirectServletTest.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
import net.arin.rdap_bootstrap.Constants;
import net.arin.rdap_bootstrap.service.JsonBootstrapFile.ServiceUrls;
import org.junit.Test;
import static junit.framework.Assert.assertEquals;
import static net.arin.rdap_bootstrap.service.TestConstants.APNIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.ARIN_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.EXAMPLE_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.EXAMPLE_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.INFO_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.LACNIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.RIPE_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.RIPE_HTTPS;
import static org.junit.Assert.assertNull;
public class RedirectServletTest
{
@Test
public void testGetRedirectUrlDefault() throws Exception
{
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
ServiceUrls urls = new ServiceUrls();
urls.addUrl( EXAMPLE_HTTP );
urls.addUrl( EXAMPLE_HTTPS );
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "http", "/bar", urls ) );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "https", "/bar", urls ) );
}
@Test
public void testGetRedirectUrlDefaultOnlyHttp() throws Exception
{
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
ServiceUrls urls = new ServiceUrls();
urls.addUrl( EXAMPLE_HTTP );
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( EXAMPLE_HTTP + "/bar", servlet.getRedirectUrl( "http", "/bar", urls ) );
assertEquals( EXAMPLE_HTTP + "/bar", servlet.getRedirectUrl( "https", "/bar", urls ) );
}
@Test
public void testGetRedirectUrlDefaultOnlyHttps() throws Exception
{
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
ServiceUrls urls = new ServiceUrls();
urls.addUrl( EXAMPLE_HTTPS );
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "http", "/bar", urls ) );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "https", "/bar", urls ) );
}
@Test
public void testGetRedirectUrlFalse() throws Exception
{
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
System.setProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY, "false" );
ServiceUrls urls = new ServiceUrls();
urls.addUrl( EXAMPLE_HTTP );
urls.addUrl( EXAMPLE_HTTPS );
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "http", "/bar", urls ) );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "https", "/bar", urls ) );
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
}
@Test
public void testGetRedirectUrlTrue() throws Exception
{
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
System.setProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY, "true" );
ServiceUrls urls = new ServiceUrls();
urls.addUrl( EXAMPLE_HTTP );
urls.addUrl( EXAMPLE_HTTPS );
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( EXAMPLE_HTTP + "/bar", servlet.getRedirectUrl( "http", "/bar", urls ) );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "https", "/bar", urls ) );
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
}
@Test
public void testGetRedirectUrlTrueOnlyHttp() throws Exception
{
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
System.setProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY, "true" );
ServiceUrls urls = new ServiceUrls();
urls.addUrl( EXAMPLE_HTTP );
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( EXAMPLE_HTTP + "/bar", servlet.getRedirectUrl( "http", "/bar", urls ) );
assertEquals( EXAMPLE_HTTP + "/bar", servlet.getRedirectUrl( "https", "/bar", urls ) );
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
}
@Test
public void testGetRedirectUrlTrueOnlyHttps() throws Exception
{
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
System.setProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY, "true" );
ServiceUrls urls = new ServiceUrls();
urls.addUrl( EXAMPLE_HTTPS );
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "http", "/bar", urls ) );
assertEquals( EXAMPLE_HTTPS + "/bar", servlet.getRedirectUrl( "https", "/bar", urls ) );
System.clearProperty( Constants.MATCH_SCHEME_ON_REDIRECT_PROPERTY );
}
@Test
public void testMakeAutNumInt() throws Exception
{
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( ARIN_HTTP, servlet.makeAutnumBase( "/autnum/10" ).getHttpUrl() );
assertEquals( RIPE_HTTPS, servlet.makeAutnumBase( "/autnum/42222" ).getHttpsUrl() );
}
@Test
public void testMakeIpBase() throws Exception
{
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( ARIN_HTTP, servlet.makeIpBase( "/ip/7.0.0.0/8" ).getHttpUrl() );
assertEquals( ARIN_HTTP, servlet.makeIpBase( "/ip/7.0.0.0/16" ).getHttpUrl() );
assertEquals( ARIN_HTTP, servlet.makeIpBase( "/ip/2620:0000:0000:0000:0000:0000:0000:0000" ).getHttpUrl() );
assertEquals( LACNIC_HTTPS, servlet.makeIpBase( "/ip/172.16.31.10/24" ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, servlet.makeIpBase( "/ip/2800:0000::/12" ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, servlet.makeIpBase( "/ip/172.16.17.32/32" ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, servlet.makeIpBase( "/ip/172.16.17.32" ).getHttpsUrl() );
}
@Test
public void testMakeDomainBase() throws Exception
{
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( ARIN_HTTP, servlet.makeDomainBase( "/domain/0.0.0.7.in-addr.arpa." ).getHttpUrl() );
assertEquals( ARIN_HTTP, servlet.makeDomainBase( "/domain/0.0.0.7.in-addr.arpa" ).getHttpUrl() );
assertEquals( ARIN_HTTP, servlet.makeDomainBase( "/domain/0.7.in-addr.arpa" ).getHttpUrl() );
assertEquals( ARIN_HTTP, servlet.makeDomainBase( "/domain/7.in-addr.arpa" ).getHttpUrl() );
assertEquals( ARIN_HTTP, servlet.makeDomainBase( "/domain/0.2.6.2.ip6.arpa" ).getHttpUrl() );
assertEquals( INFO_HTTPS, servlet.makeDomainBase( "/domain/example.INFO" ).getHttpsUrl() );
assertEquals( INFO_HTTPS, servlet.makeDomainBase( "/domain/example.INFO." ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, servlet.makeDomainBase( "/domain/0.0.8.2.ip6.arpa" ).getHttpsUrl() );
}
@Test
public void testMakeNameserverBase() throws Exception
{
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( INFO_HTTPS, servlet.makeNameserverBase( "/nameserver/ns1.example.INFO" ).getHttpsUrl() );
assertEquals( INFO_HTTPS, servlet.makeNameserverBase( "/nameserver/ns1.example.INFO." ).getHttpsUrl() );
assertNull( servlet.makeNameserverBase( "/nameserver/ns1.5.in-addr.arpa." ) );
}
@Test
public void testMakeEntityBase() throws Exception
{
RedirectServlet servlet = new RedirectServlet();
servlet.init( null );
assertEquals( APNIC_HTTPS, servlet.makeEntityBase( "/entity/ABC123-AP" ).getHttpsUrl() );
assertEquals( ARIN_HTTP, servlet.makeEntityBase( "/entity/ABC123-ARIN" ).getHttpUrl() );
assertEquals( LACNIC_HTTPS, servlet.makeEntityBase( "/entity/ABC123-LACNIC" ).getHttpsUrl() );
assertEquals( RIPE_HTTP, servlet.makeEntityBase( "/entity/ABC123-RIPE" ).getHttpUrl() );
}
}
<|start_filename|>src/test/java/net/arin/rdap_bootstrap/service/DefaultBootstrapTest.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
import net.arin.rdap_bootstrap.service.DefaultBootstrap.Type;
import org.junit.Test;
import static junit.framework.Assert.assertEquals;
import static net.arin.rdap_bootstrap.service.TestConstants.ARIN_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.INFO_HTTPS;
public class DefaultBootstrapTest
{
@Test
public void testAllocations() throws Exception
{
DefaultBootstrap d = new DefaultBootstrap();
d.loadData( new ResourceFiles() );
assertEquals( ARIN_HTTP, d.getServiceUrls( Type.AUTNUM ).getHttpUrl() );
assertEquals( INFO_HTTPS, d.getServiceUrls( Type.DOMAIN ).getHttpsUrl() );
}
}
<|start_filename|>src/test/java/net/arin/rdap_bootstrap/service/IpV4BootstrapTest.java<|end_filename|>
/*
* Copyright (C) 2013-2020 American Registry for Internet Numbers (ARIN)
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
package net.arin.rdap_bootstrap.service;
import static junit.framework.Assert.assertEquals;
import static net.arin.rdap_bootstrap.service.TestConstants.AFRINIC_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.APNIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.ARIN_HTTP;
import static net.arin.rdap_bootstrap.service.TestConstants.LACNIC_HTTPS;
import static net.arin.rdap_bootstrap.service.TestConstants.RIPE_HTTPS;
import org.junit.Test;
public class IpV4BootstrapTest
{
@Test
public void testAllocations() throws Exception
{
IpV4Bootstrap v4 = new IpV4Bootstrap();
v4.loadData( new ResourceFiles() );
// Test prefixes.
assertEquals( AFRINIC_HTTP, v4.getServiceUrls( "41" ).getHttpUrl() );
assertEquals( APNIC_HTTPS, v4.getServiceUrls( "1" ).getHttpsUrl() );
assertEquals( APNIC_HTTPS, v4.getServiceUrls( "27" ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, v4.getServiceUrls( "177" ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, v4.getServiceUrls( "191" ).getHttpsUrl() );
assertEquals( RIPE_HTTPS, v4.getServiceUrls( "31" ).getHttpsUrl() );
assertEquals( RIPE_HTTPS, v4.getServiceUrls( "188" ).getHttpsUrl() );
// Test full prefixes.
assertEquals( ARIN_HTTP, v4.getServiceUrls( "2192.168.127.12/8" ).getHttpUrl() );
assertEquals( LACNIC_HTTPS, v4.getServiceUrls( "192.168.127.12/8" ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, v4.getServiceUrls( "192.168.127.12/32" ).getHttpsUrl() );
assertEquals( LACNIC_HTTPS, v4.getServiceUrls( "192.168.127.12" ).getHttpsUrl() );
}
}
<|start_filename|>Dockerfile<|end_filename|>
FROM ubuntu
USER root
# Install basic tools/utilities and Google Chrome (which supports headless mode across platforms). Combining them so that the apt cache cleanup only needs to be done once.
RUN apt-get update -y && \
apt-get install -y --no-install-recommends ca-certificates \
autoconf \
build-essential \
gconf-service \
libasound2 \
libatk1.0-0 \
libdbus-1-3 \
libgconf-2-4 \
libgtk-3-0 \
libnspr4 \
libnss3 \
libtool \
libx11-xcb1 \
libxss1 \
libxtst6 \
pulseaudio \
fonts-liberation \
libappindicator1 \
xdg-utils \
wget \
curl \
xz-utils \
lsb-release \
supervisor \
libnss3-tools \
libfontconfig1 \
apt-transport-https \
inotify-tools \
libappindicator3-1
RUN sed -i 's/^\(\[supervisord\]\)$/\1\nnodaemon=true/' /etc/supervisor/supervisord.conf
RUN wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
RUN dpkg -i google-chrome*.deb
RUN apt-get install -f -y
RUN apt-get clean autoclean
RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* google-chrome-stable_current_amd64.deb
# Install nodejs
ENV NPM_CONFIG_LOGLEVEL=info NODE_VERSION=8.3.0
RUN curl -SLO "https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.xz" \
&& tar -xJf "node-v$NODE_VERSION-linux-x64.tar.xz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-x64.tar.xz" \
&& ln -s /usr/local/bin/node /usr/local/bin/nodejs
RUN npm install -g yarn
RUN mkdir -p /src/
ADD supervisord.conf /etc/supervisor/
ADD package.json /src/
ADD yarn.lock /src/
ADD server.js /src/
ADD renderer.proto /src/
WORKDIR /src/
RUN rm -rf node_modules
RUN yarn
# Define working directory.
WORKDIR /etc/supervisor
# Define default command.
CMD ["supervisord", "-c", "/etc/supervisor/supervisord.conf"]
<|start_filename|>renderer.pb.go<|end_filename|>
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: renderer.proto
/*
Package renderer is a generated protocol buffer package.
It is generated from these files:
renderer.proto
It has these top-level messages:
Request
Response
Empty
*/
package renderer
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
type Request struct {
Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"`
}
func (m *Request) Reset() { *m = Request{} }
func (m *Request) String() string { return proto.CompactTextString(m) }
func (*Request) ProtoMessage() {}
func (*Request) Descriptor() ([]byte, []int) { return fileDescriptorRenderer, []int{0} }
func (m *Request) GetUrl() string {
if m != nil {
return m.Url
}
return ""
}
type Response struct {
Data string `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"`
}
func (m *Response) Reset() { *m = Response{} }
func (m *Response) String() string { return proto.CompactTextString(m) }
func (*Response) ProtoMessage() {}
func (*Response) Descriptor() ([]byte, []int) { return fileDescriptorRenderer, []int{1} }
func (m *Response) GetData() string {
if m != nil {
return m.Data
}
return ""
}
// Not using google.protobuf.Empty because it doesn't play nicely with dynamic imports.
type Empty struct {
}
func (m *Empty) Reset() { *m = Empty{} }
func (m *Empty) String() string { return proto.CompactTextString(m) }
func (*Empty) ProtoMessage() {}
func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptorRenderer, []int{2} }
func init() {
proto.RegisterType((*Request)(nil), "renderer.Request")
proto.RegisterType((*Response)(nil), "renderer.Response")
proto.RegisterType((*Empty)(nil), "renderer.Empty")
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// Client API for RendererService service
type RendererServiceClient interface {
CheckHealth(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error)
Render(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error)
Screenshot(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error)
Print(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error)
}
type rendererServiceClient struct {
cc *grpc.ClientConn
}
func NewRendererServiceClient(cc *grpc.ClientConn) RendererServiceClient {
return &rendererServiceClient{cc}
}
func (c *rendererServiceClient) CheckHealth(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) {
out := new(Empty)
err := grpc.Invoke(ctx, "/renderer.RendererService/CheckHealth", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *rendererServiceClient) Render(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error) {
out := new(Response)
err := grpc.Invoke(ctx, "/renderer.RendererService/Render", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *rendererServiceClient) Screenshot(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error) {
out := new(Response)
err := grpc.Invoke(ctx, "/renderer.RendererService/Screenshot", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *rendererServiceClient) Print(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error) {
out := new(Response)
err := grpc.Invoke(ctx, "/renderer.RendererService/Print", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// Server API for RendererService service
type RendererServiceServer interface {
CheckHealth(context.Context, *Empty) (*Empty, error)
Render(context.Context, *Request) (*Response, error)
Screenshot(context.Context, *Request) (*Response, error)
Print(context.Context, *Request) (*Response, error)
}
func RegisterRendererServiceServer(s *grpc.Server, srv RendererServiceServer) {
s.RegisterService(&_RendererService_serviceDesc, srv)
}
func _RendererService_CheckHealth_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(Empty)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(RendererServiceServer).CheckHealth(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/renderer.RendererService/CheckHealth",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(RendererServiceServer).CheckHealth(ctx, req.(*Empty))
}
return interceptor(ctx, in, info, handler)
}
func _RendererService_Render_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(Request)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(RendererServiceServer).Render(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/renderer.RendererService/Render",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(RendererServiceServer).Render(ctx, req.(*Request))
}
return interceptor(ctx, in, info, handler)
}
func _RendererService_Screenshot_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(Request)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(RendererServiceServer).Screenshot(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/renderer.RendererService/Screenshot",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(RendererServiceServer).Screenshot(ctx, req.(*Request))
}
return interceptor(ctx, in, info, handler)
}
func _RendererService_Print_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(Request)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(RendererServiceServer).Print(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/renderer.RendererService/Print",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(RendererServiceServer).Print(ctx, req.(*Request))
}
return interceptor(ctx, in, info, handler)
}
var _RendererService_serviceDesc = grpc.ServiceDesc{
ServiceName: "renderer.RendererService",
HandlerType: (*RendererServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "CheckHealth",
Handler: _RendererService_CheckHealth_Handler,
},
{
MethodName: "Render",
Handler: _RendererService_Render_Handler,
},
{
MethodName: "Screenshot",
Handler: _RendererService_Screenshot_Handler,
},
{
MethodName: "Print",
Handler: _RendererService_Print_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "renderer.proto",
}
func init() { proto.RegisterFile("renderer.proto", fileDescriptorRenderer) }
var fileDescriptorRenderer = []byte{
// 203 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2b, 0x4a, 0xcd, 0x4b,
0x49, 0x2d, 0x4a, 0x2d, 0xd2, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x80, 0xf1, 0x95, 0xa4,
0xb9, 0xd8, 0x83, 0x52, 0x0b, 0x4b, 0x53, 0x8b, 0x4b, 0x84, 0x04, 0xb8, 0x98, 0x4b, 0x8b, 0x72,
0x24, 0x18, 0x15, 0x18, 0x35, 0x38, 0x83, 0x40, 0x4c, 0x25, 0x39, 0x2e, 0x8e, 0xa0, 0xd4, 0xe2,
0x82, 0xfc, 0xbc, 0xe2, 0x54, 0x21, 0x21, 0x2e, 0x96, 0x94, 0xc4, 0x92, 0x44, 0xa8, 0x34, 0x98,
0xad, 0xc4, 0xce, 0xc5, 0xea, 0x9a, 0x5b, 0x50, 0x52, 0x69, 0xf4, 0x80, 0x91, 0x8b, 0x3f, 0x08,
0x6a, 0x64, 0x70, 0x6a, 0x51, 0x59, 0x66, 0x72, 0xaa, 0x90, 0x21, 0x17, 0xb7, 0x73, 0x46, 0x6a,
0x72, 0xb6, 0x47, 0x6a, 0x62, 0x4e, 0x49, 0x86, 0x10, 0xbf, 0x1e, 0xdc, 0x0d, 0x60, 0x3d, 0x52,
0xe8, 0x02, 0x4a, 0x0c, 0x42, 0x86, 0x5c, 0x6c, 0x10, 0x53, 0x84, 0x04, 0x11, 0x92, 0x50, 0xe7,
0x49, 0x09, 0x21, 0x0b, 0x41, 0x1c, 0xa5, 0xc4, 0x20, 0x64, 0xca, 0xc5, 0x15, 0x9c, 0x5c, 0x94,
0x9a, 0x9a, 0x57, 0x9c, 0x91, 0x5f, 0x42, 0xbc, 0x36, 0x03, 0x2e, 0xd6, 0x80, 0xa2, 0xcc, 0x3c,
0xe2, 0x75, 0x38, 0x71, 0x45, 0xc1, 0x03, 0x2d, 0x89, 0x0d, 0x1c, 0x8a, 0xc6, 0x80, 0x00, 0x00,
0x00, 0xff, 0xff, 0xf3, 0x5c, 0xa6, 0x07, 0x57, 0x01, 0x00, 0x00,
}
<|start_filename|>server.js<|end_filename|>
const PROTO_PATH = __dirname + "/renderer.proto";
const fs = require("fs");
const grpc = require("grpc");
const { Chromeless } = require("chromeless");
const renderer_proto = grpc.load(PROTO_PATH).renderer;
const agent =
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36";
async function runRender(url) {
const chromeless = new Chromeless();
let html = await chromeless
.setUserAgent(agent)
.goto(url)
.wait(1500)
.html();
await chromeless.end();
return html;
}
async function runPrint(url) {
const chromeless = new Chromeless();
let src = await chromeless
.setUserAgent(agent)
.goto(url)
.wait(1500)
.pdf();
await chromeless.end();
const bmp = fs.readFileSync(src);
return Buffer.from(bmp).toString("base64");
}
async function runScreenshot(url) {
const chromeless = new Chromeless();
let src = await chromeless
.setUserAgent(agent)
.goto(url)
.wait(1500)
.screenshot();
await chromeless.end();
const bmp = fs.readFileSync(src);
return Buffer.from(bmp).toString("base64");
}
/**
* Implements the CheckHealth RPC method.
*/
function checkHealth(empty, callback) {
// TODO - can we validate that chromeless is working?
callback(null, {});
}
/**
* Implements the Render RPC method.
*/
function render(req, callback) {
runRender(req.request.url)
.then(data => callback(null, { data }))
.catch(err => callback(err));
}
/**
* Implements the Print RPC method.
*/
function print(req, callback) {
runPrint(req.request.url)
.then(data => callback(null, { data }))
.catch(err => callback(err));
}
/**
* Implements the Screenshot RPC method.
*/
function screenshot(req, callback) {
runScreenshot(req.request.url)
.then(data => callback(null, { data }))
.catch(err => callback(err));
}
/**
 * Starts a gRPC server that serves the RendererService on port 3000.
 */
function main() {
const server = new grpc.Server();
server.addService(renderer_proto.RendererService.service, {
checkHealth,
render,
screenshot,
print
});
server.bind("0.0.0.0:3000", grpc.ServerCredentials.createInsecure());
server.start();
}
main();
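// Client sketch (illustrative, not part of this server): assumes the same
// renderer.proto is loaded on the client side; the address matches the bind
// above, and the URL is an example.
//
//   const client = new renderer_proto.RendererService(
//     "localhost:3000",
//     grpc.credentials.createInsecure()
//   );
//   client.render({ url: "https://example.com" }, (err, res) => {
//     if (err) return console.error(err);
//     console.log(`received ${res.data.length} characters of HTML`);
//   });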
| moonlightwork/renderer |
<|start_filename|>preproc.h<|end_filename|>
#ifndef PREPROC_H
#define PREPROC_H
#include <stdio.h>
struct cpp;
struct cpp *cpp_new(void);
void cpp_free(struct cpp*);
void cpp_add_includedir(struct cpp *cpp, const char* includedir);
int cpp_add_define(struct cpp *cpp, const char *mdecl);
int cpp_run(struct cpp *cpp, FILE* in, FILE* out, const char* inname);
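/* Typical usage (sketch; error handling omitted, names illustrative —
 * see cppmain.c for a complete driver):
 *
 *   struct cpp *cpp = cpp_new();
 *   cpp_add_includedir(cpp, "include");
 *   cpp_add_define(cpp, "VERSION 2");
 *   int ok = cpp_run(cpp, stdin, stdout, "<stdin>");
 *   cpp_free(cpp);
 */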
#ifdef __GNUC__
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#endif
#pragma RcB2 DEP "preproc.c"
#endif
<|start_filename|>cppmain.c<|end_filename|>
#include "preproc.h"
#include <string.h>
#include <unistd.h>
static int usage(char *a0) {
fprintf(stderr,
"example preprocessor\n"
"usage: %s [-I includedir...] [-D define] file\n"
"if no filename or '-' is passed, stdin is used.\n"
, a0);
return 1;
}
int main(int argc, char** argv) {
int c; char* tmp;
struct cpp* cpp = cpp_new();
while ((c = getopt(argc, argv, "D:I:")) != EOF) switch(c) {
case 'I': cpp_add_includedir(cpp, optarg); break;
case 'D':
if((tmp = strchr(optarg, '='))) *tmp = ' ';
cpp_add_define(cpp, optarg);
break;
default: return usage(argv[0]);
}
char *fn = "stdin";
FILE *in = stdin;
if(argv[optind] && strcmp(argv[optind], "-")) {
fn = argv[optind];
in = fopen(fn, "r");
if(!in) {
perror("fopen");
return 1;
}
}
int ret = cpp_run(cpp, in, stdout, fn);
cpp_free(cpp);
if(in != stdin) fclose(in);
return !ret;
}
<|start_filename|>preproc.c<|end_filename|>
#include <string.h>
#include <ctype.h>
#include <assert.h>
#include "preproc.h"
#include "tokenizer.h"
#include "tglist.h"
#include "hbmap.h"
#define MACRO_FLAG_OBJECTLIKE (1U<<31)
#define MACRO_FLAG_VARIADIC (1U<<30)
#define MACRO_ARGCOUNT_MASK (~(0|MACRO_FLAG_OBJECTLIKE|MACRO_FLAG_VARIADIC))
#define OBJECTLIKE(M) (M->num_args & MACRO_FLAG_OBJECTLIKE)
#define FUNCTIONLIKE(M) (!(OBJECTLIKE(M)))
#define MACRO_ARGCOUNT(M) (M->num_args & MACRO_ARGCOUNT_MASK)
#define MACRO_VARIADIC(M) (M->num_args & MACRO_FLAG_VARIADIC)
#define MAX_RECURSION 32
static unsigned string_hash(const char* s) {
uint_fast32_t h = 0;
while (*s) {
h = 16*h + *s++;
h ^= h>>24 & 0xf0;
}
return h & 0xfffffff;
}
struct macro {
unsigned num_args;
FILE* str_contents;
char *str_contents_buf;
tglist(char*) argnames;
};
struct cpp {
tglist(char*) includedirs;
hbmap(char*, struct macro, 128) *macros;
const char *last_file;
int last_line;
struct tokenizer *tchain[MAX_RECURSION];
};
static int token_needs_string(struct token *tok) {
switch(tok->type) {
case TT_IDENTIFIER:
case TT_WIDECHAR_LIT:
case TT_WIDESTRING_LIT:
case TT_SQSTRING_LIT:
case TT_DQSTRING_LIT:
case TT_ELLIPSIS:
case TT_HEX_INT_LIT:
case TT_OCT_INT_LIT:
case TT_DEC_INT_LIT:
case TT_FLOAT_LIT:
case TT_UNKNOWN:
return 1;
default:
return 0;
}
}
static void tokenizer_from_file(struct tokenizer *t, FILE* f) {
tokenizer_init(t, f, TF_PARSE_STRINGS);
tokenizer_set_filename(t, "<macro>");
tokenizer_rewind(t);
}
static int strptrcmp(const void *a, const void *b) {
const char * const *x = a;
const char * const *y = b;
return strcmp(*x, *y);
}
static struct macro* get_macro(struct cpp *cpp, const char *name) {
return hbmap_get(cpp->macros, name);
}
static void add_macro(struct cpp *cpp, const char *name, struct macro*m) {
hbmap_insert(cpp->macros, name, *m);
}
static int undef_macro(struct cpp *cpp, const char *name) {
hbmap_iter k = hbmap_find(cpp->macros, name);
if(k == (hbmap_iter) -1) return 0;
struct macro *m = &hbmap_getval(cpp->macros, k);
free(hbmap_getkey(cpp->macros, k));
if(m->str_contents) fclose(m->str_contents);
free(m->str_contents_buf);
tglist_free_values(&m->argnames);
tglist_free_items(&m->argnames);
hbmap_delete(cpp->macros, k);
return 1;
}
static void free_macros(struct cpp *cpp) {
hbmap_iter i;
hbmap_foreach(cpp->macros, i) {
while(hbmap_iter_index_valid(cpp->macros, i))
undef_macro(cpp, hbmap_getkey(cpp->macros, i));
}
hbmap_fini(cpp->macros, 1);
free(cpp->macros);
}
static void error_or_warning(const char *err, const char* type, struct tokenizer *t, struct token *curr) {
unsigned column = curr ? curr->column : t->column;
unsigned line = curr ? curr->line : t->line;
dprintf(2, "<%s> %u:%u %s: '%s'\n", t->filename, line, column, type, err);
dprintf(2, "%s\n", t->buf);
size_t i, l = strlen(t->buf);
for(i = 0; i < l; i++)
dprintf(2, "^");
dprintf(2, "\n");
}
static void error(const char *err, struct tokenizer *t, struct token *curr) {
error_or_warning(err, "error", t, curr);
}
static void warning(const char *err, struct tokenizer *t, struct token *curr) {
error_or_warning(err, "warning", t, curr);
}
static void emit(FILE *out, const char *s) {
fprintf(out, "%s", s);
}
static int x_tokenizer_next_of(struct tokenizer *t, struct token *tok, int fail_unk) {
int ret = tokenizer_next(t, tok);
if(tok->type == TT_OVERFLOW) {
error("max token length of 4095 exceeded!", t, tok);
return 0;
} else if (fail_unk && ret == 0) {
error("tokenizer encountered unknown token", t, tok);
return 0;
}
return 1;
}
#define tokenizer_next(T, TOK) x_tokenizer_next_of(T, TOK, 0)
#define x_tokenizer_next(T, TOK) x_tokenizer_next_of(T, TOK, 1)
static int is_whitespace_token(struct token *token)
{
return token->type == TT_SEP &&
(token->value == ' ' || token->value == '\t');
}
/* return index of matching item in values array, or -1 on error */
static int expect(struct tokenizer *t, enum tokentype tt, const char* values[], struct token *token)
{
int ret;
do {
ret = tokenizer_next(t, token);
if(ret == 0 || token->type == TT_EOF) goto err;
} while(is_whitespace_token(token));
if(token->type != tt) {
err:
error("unexpected token", t, token);
return -1;
}
int i = 0;
while(values[i]) {
if(!strcmp(values[i], t->buf))
return i;
++i;
}
return -1;
}
static int is_char(struct token *tok, int ch) {
return tok->type == TT_SEP && tok->value == ch;
}
static void flush_whitespace(FILE *out, int *ws_count) {
while(*ws_count > 0) {
emit(out, " ");
--(*ws_count);
}
}
/* skips to the next non-whitespace token (if the current one is whitespace) */
static int eat_whitespace(struct tokenizer *t, struct token *token, int *count) {
*count = 0;
int ret = 1;
while (is_whitespace_token(token)) {
++(*count);
ret = x_tokenizer_next(t, token);
if(!ret) break;
}
return ret;
}
/* fetches the next token until it is non-whitespace */
static int skip_next_and_ws(struct tokenizer *t, struct token *tok) {
int ret = tokenizer_next(t, tok);
if(!ret) return ret;
int ws_count;
ret = eat_whitespace(t, tok, &ws_count);
return ret;
}
static void emit_token(FILE* out, struct token *tok, const char* strbuf) {
if(tok->type == TT_SEP) {
fprintf(out, "%c", tok->value);
} else if(strbuf && token_needs_string(tok)) {
fprintf(out, "%s", strbuf);
} else {
dprintf(2, "oops, dunno how to handle tt %d (%s)\n", (int) tok->type, strbuf);
}
}
int parse_file(struct cpp* cpp, FILE *f, const char* fn, FILE *out);
static int include_file(struct cpp* cpp, struct tokenizer *t, FILE* out) {
static const char* inc_chars[] = { "\"", "<", 0};
static const char* inc_chars_end[] = { "\"", ">", 0};
struct token tok;
tokenizer_set_flags(t, 0); // disable string tokenization
int inc1sep = expect(t, TT_SEP, inc_chars, &tok);
if(inc1sep == -1) {
error("expected one of [\"<]", t, &tok);
return 0;
}
int ret = tokenizer_read_until(t, inc_chars_end[inc1sep], 1);
if(!ret) {
error("error parsing filename", t, &tok);
return 0;
}
// TODO: different path lookup depending on whether " or <
size_t i;
FILE *f = 0;
tglist_foreach(&cpp->includedirs, i) {
char buf[512];
snprintf(buf, sizeof buf, "%s/%s", tglist_get(&cpp->includedirs, i), t->buf);
f = fopen(buf, "r");
if(f) break;
}
if(!f) {
dprintf(2, "%s: ", t->buf);
perror("fopen");
return 0;
}
const char *fn = strdup(t->buf);
/* consume the closing quote/bracket outside assert so it still happens with NDEBUG */
int r = tokenizer_next(t, &tok);
assert(r && is_char(&tok, inc_chars_end[inc1sep][0]));
(void) r;
tokenizer_set_flags(t, TF_PARSE_STRINGS);
return parse_file(cpp, f, fn, out);
}
static int emit_error_or_warning(struct tokenizer *t, int is_error) {
int ws_count;
int ret = tokenizer_skip_chars(t, " \t", &ws_count);
if(!ret) return ret;
struct token tmp = {.column = t->column, .line = t->line};
ret = tokenizer_read_until(t, "\n", 1);
if(is_error) {
error(t->buf, t, &tmp);
return 0;
}
warning(t->buf, t, &tmp);
return 1;
}
static FILE *freopen_r(FILE *f, char **buf, size_t *size) {
fflush(f);
fclose(f);
return fmemopen(*buf, *size, "r");
}
static int consume_nl_and_ws(struct tokenizer *t, struct token *tok, int expected) {
if(!x_tokenizer_next(t, tok)) {
err:
error("unexpected", t, tok);
return 0;
}
if(expected) {
if(tok->type != TT_SEP || tok->value != expected) goto err;
switch(expected) {
case '\\' : expected = '\n'; break;
case '\n' : expected = 0; break;
}
} else {
if(is_whitespace_token(tok)) ;
else if(is_char(tok, '\\')) expected = '\n';
else return 1;
}
return consume_nl_and_ws(t, tok, expected);
}
static int expand_macro(struct cpp *cpp, struct tokenizer *t, FILE* out, const char* name, unsigned rec_level, char *visited[]);
static int parse_macro(struct cpp *cpp, struct tokenizer *t) {
int ws_count;
int ret = tokenizer_skip_chars(t, " \t", &ws_count);
if(!ret) return ret;
struct token curr;
ret = tokenizer_next(t, &curr) && curr.type != TT_EOF;
if(!ret) {
error("parsing macro name", t, &curr);
return ret;
}
if(curr.type != TT_IDENTIFIER) {
error("expected identifier", t, &curr);
return 0;
}
const char* macroname = strdup(t->buf);
#ifdef DEBUG
dprintf(2, "parsing macro %s\n", macroname);
#endif
int redefined = 0;
if(get_macro(cpp, macroname)) {
if(!strcmp(macroname, "defined")) {
error("\"defined\" cannot be used as a macro name", t, &curr);
return 0;
}
redefined = 1;
}
struct macro new = { 0 };
unsigned macro_flags = MACRO_FLAG_OBJECTLIKE;
tglist_init(&new.argnames);
ret = x_tokenizer_next(t, &curr) && curr.type != TT_EOF;
if(!ret) return ret;
if (is_char(&curr, '(')) {
macro_flags = 0;
unsigned expected = 0;
while(1) {
/* process next function argument identifier */
ret = consume_nl_and_ws(t, &curr, expected);
if(!ret) {
error("unexpected", t, &curr);
return ret;
}
expected = 0;
if(curr.type == TT_SEP) {
switch(curr.value) {
case '\\':
expected = '\n';
continue;
case ',':
continue;
case ')':
ret = tokenizer_skip_chars(t, " \t", &ws_count);
if(!ret) return ret;
goto break_loop1;
default:
error("unexpected character", t, &curr);
return 0;
}
} else if(!(curr.type == TT_IDENTIFIER || curr.type == TT_ELLIPSIS)) {
error("expected identifier for macro arg", t, &curr);
return 0;
}
{
if(curr.type == TT_ELLIPSIS) {
if(macro_flags & MACRO_FLAG_VARIADIC) {
error("\"...\" isn't the last parameter", t, &curr);
return 0;
}
macro_flags |= MACRO_FLAG_VARIADIC;
}
char *tmps = strdup(t->buf);
tglist_add(&new.argnames, tmps);
}
++new.num_args;
}
break_loop1:;
} else if(is_whitespace_token(&curr)) {
ret = tokenizer_skip_chars(t, " \t", &ws_count);
if(!ret) return ret;
} else if(is_char(&curr, '\n')) {
/* content-less macro */
goto done;
}
struct FILE_container {
FILE *f;
char *buf;
size_t len;
} contents;
contents.f = open_memstream(&contents.buf, &contents.len);
int backslash_seen = 0;
while(1) {
/* ignore unknown tokens in macro body */
ret = tokenizer_next(t, &curr);
if(!ret) return 0;
if(curr.type == TT_EOF) break;
if (curr.type == TT_SEP) {
if(curr.value == '\\')
backslash_seen = 1;
else {
if(curr.value == '\n' && !backslash_seen) break;
emit_token(contents.f, &curr, t->buf);
backslash_seen = 0;
}
} else {
emit_token(contents.f, &curr, t->buf);
}
}
new.str_contents = freopen_r(contents.f, &contents.buf, &contents.len);
new.str_contents_buf = contents.buf;
done:
if(redefined) {
struct macro *old = get_macro(cpp, macroname);
char *s_old = old->str_contents_buf ? old->str_contents_buf : "";
char *s_new = new.str_contents_buf ? new.str_contents_buf : "";
if(strcmp(s_old, s_new)) {
char buf[128];
sprintf(buf, "redefinition of macro %s", macroname);
warning(buf, t, 0);
}
}
new.num_args |= macro_flags;
add_macro(cpp, macroname, &new);
return 1;
}
static size_t macro_arglist_pos(struct macro *m, const char* iden) {
size_t i;
for(i = 0; i < tglist_getsize(&m->argnames); i++) {
char *item = tglist_get(&m->argnames, i);
if(!strcmp(item, iden)) return i;
}
return (size_t) -1;
}
struct macro_info {
const char *name;
unsigned nest;
unsigned first;
unsigned last;
};
static int was_visited(const char *name, char*visited[], unsigned rec_level) {
int x;
for(x = rec_level; x >= 0; --x) {
if(!strcmp(visited[x], name)) return 1;
}
return 0;
}
unsigned get_macro_info(struct cpp* cpp,
struct tokenizer *t,
struct macro_info *mi_list, size_t *mi_cnt,
unsigned nest, unsigned tpos, const char *name,
char* visited[], unsigned rec_level
) {
int brace_lvl = 0;
while(1) {
struct token tok;
int ret = tokenizer_next(t, &tok);
if(!ret || tok.type == TT_EOF) break;
#ifdef DEBUG
dprintf(2, "(%s) nest %d, brace %u t: %s\n", name, nest, brace_lvl, t->buf);
#endif
struct macro* m = 0;
if(tok.type == TT_IDENTIFIER && (m = get_macro(cpp, t->buf)) && !was_visited(t->buf, visited, rec_level)) {
const char* newname = strdup(t->buf);
if(FUNCTIONLIKE(m)) {
if(tokenizer_peek(t) == '(') {
unsigned tpos_save = tpos;
tpos = get_macro_info(cpp, t, mi_list, mi_cnt, nest+1, tpos+1, newname, visited, rec_level);
mi_list[*mi_cnt] = (struct macro_info) {
.name = newname,
.nest=nest+1,
.first = tpos_save,
.last = tpos + 1};
++(*mi_cnt);
} else {
/* suppress expansion */
}
} else {
mi_list[*mi_cnt] = (struct macro_info) {
.name = newname,
.nest=nest+1,
.first = tpos,
.last = tpos + 1};
++(*mi_cnt);
}
} else if(is_char(&tok, '(')) {
++brace_lvl;
} else if(is_char(&tok, ')')) {
--brace_lvl;
if(brace_lvl == 0 && nest != 0) break;
}
++tpos;
}
return tpos;
}
struct FILE_container {
FILE *f;
char *buf;
size_t len;
struct tokenizer t;
};
static void free_file_container(struct FILE_container *fc) {
fclose(fc->f);
free(fc->buf);
}
static int mem_tokenizers_join(
struct FILE_container* org, struct FILE_container *inj,
struct FILE_container* result,
int first, off_t lastpos) {
result->f = open_memstream(&result->buf, &result->len);
size_t i;
struct token tok;
int ret;
tokenizer_rewind(&org->t);
for(i=0; i<first; ++i) {
ret = tokenizer_next(&org->t, &tok);
assert(ret && tok.type != TT_EOF);
emit_token(result->f, &tok, org->t.buf);
}
int cnt = 0, last = first;
while(1) {
ret = tokenizer_next(&inj->t, &tok);
if(!ret || tok.type == TT_EOF) break;
emit_token(result->f, &tok, inj->t.buf);
++cnt;
}
while(tokenizer_ftello(&org->t) < lastpos) {
ret = tokenizer_next(&org->t, &tok);
last++;
}
int diff = cnt - ((int) last - (int) first);
while(1) {
ret = tokenizer_next(&org->t, &tok);
if(!ret || tok.type == TT_EOF) break;
emit_token(result->f, &tok, org->t.buf);
}
result->f = freopen_r(result->f, &result->buf, &result->len);
tokenizer_from_file(&result->t, result->f);
return diff;
}
static int tchain_parens_follows(struct cpp *cpp, int rec_level) {
int i, c = 0;
for(i=rec_level;i>=0;--i) {
c = tokenizer_peek(cpp->tchain[i]);
if(c == EOF) continue;
if(c == '(') return i;
else break;
}
return -1;
}
static int stringify(struct cpp *ccp, struct tokenizer *t, FILE* output) {
int ret = 1;
struct token tok;
emit(output, "\"");
while(1) {
ret = tokenizer_next(t, &tok);
if(!ret) return ret;
if(tok.type == TT_EOF) break;
if(is_char(&tok, '\n')) continue;
if(is_char(&tok, '\\') && tokenizer_peek(t) == '\n') continue;
if(tok.type == TT_DQSTRING_LIT) {
char *s = t->buf;
char buf[2] = {0};
while(*s) {
if(*s == '\"') {
emit(output, "\\\"");
} else if (*s == '\\') {
emit(output, "\\\\");
} else {
buf[0] = *s;
emit(output, buf);
}
++s;
}
} else
emit_token(output, &tok, t->buf);
}
emit(output, "\"");
return ret;
}
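/* stringify example (sketch): an argument scanned as the tokens x "y"
   becomes the single literal "x \"y\"" — this is the '#' stringification
   used by expand_macro below. */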
/* rec_level -1 serves as a magic value to signal we're using
expand_macro from the if-evaluator code, which means activating
the "defined" macro */
static int expand_macro(struct cpp* cpp, struct tokenizer *t, FILE* out, const char* name, unsigned rec_level, char* visited[]) {
int is_define = !strcmp(name, "defined");
struct macro *m;
if(is_define && rec_level != -1)
m = NULL;
else m = get_macro(cpp, name);
if(!m) {
emit(out, name);
return 1;
}
if(rec_level == -1) rec_level = 0;
if(rec_level >= MAX_RECURSION) {
error("max recursion level reached", t, 0);
return 0;
}
#ifdef DEBUG
dprintf(2, "lvl %u: expanding macro %s (%s)\n", rec_level, name, m->str_contents_buf);
#endif
if(rec_level == 0 && strcmp(t->filename, "<macro>")) {
cpp->last_file = t->filename;
cpp->last_line = t->line;
}
if(!strcmp(name, "__FILE__")) {
emit(out, "\"");
emit(out, cpp->last_file);
emit(out, "\"");
return 1;
} else if(!strcmp(name, "__LINE__")) {
char buf[64];
sprintf(buf, "%d", cpp->last_line);
emit(out, buf);
return 1;
}
if(visited[rec_level]) free(visited[rec_level]);
visited[rec_level] = strdup(name);
cpp->tchain[rec_level] = t;
size_t i;
struct token tok;
unsigned num_args = MACRO_ARGCOUNT(m);
struct FILE_container *argvalues = calloc(MACRO_VARIADIC(m) ? num_args + 1 : num_args, sizeof(struct FILE_container));
for(i=0; i < num_args; i++)
argvalues[i].f = open_memstream(&argvalues[i].buf, &argvalues[i].len);
/* replace named arguments in the contents of the macro call */
if(FUNCTIONLIKE(m)) {
int ret;
if((ret = tokenizer_peek(t)) != '(') {
/* function-like macro shall not be expanded if not followed by '(' */
if(ret == EOF && rec_level > 0 && (ret = tchain_parens_follows(cpp, rec_level-1)) != -1) {
// warning("Replacement text involved subsequent text", t, 0);
t = cpp->tchain[ret];
} else {
emit(out, name);
goto cleanup;
}
}
ret = x_tokenizer_next(t, &tok);
assert(ret && is_char(&tok, '('));
unsigned curr_arg = 0, need_arg = 1, parens = 0;
int ws_count;
if(!tokenizer_skip_chars(t, " \t", &ws_count)) return 0;
int varargs = 0;
if(num_args == 1 && MACRO_VARIADIC(m)) varargs = 1;
while(1) {
int ret = tokenizer_next(t, &tok);
if(!ret) return 0;
if( tok.type == TT_EOF) {
dprintf(2, "warning EOF\n");
break;
}
if(!parens && is_char(&tok, ',') && !varargs) {
if(need_arg && !ws_count) {
/* empty argument is OK */
}
need_arg = 1;
if(!varargs) curr_arg++;
if(curr_arg + 1 == num_args && MACRO_VARIADIC(m)) {
varargs = 1;
} else if(curr_arg >= num_args) {
error("too many arguments for function macro", t, &tok);
return 0;
}
ret = tokenizer_skip_chars(t, " \t", &ws_count);
if(!ret) return ret;
continue;
} else if(is_char(&tok, '(')) {
++parens;
} else if(is_char(&tok, ')')) {
if(!parens) {
if(curr_arg + num_args && curr_arg < num_args-1) {
error("too few args for function macro", t, &tok);
return 0;
}
break;
}
--parens;
} else if(is_char(&tok, '\\')) {
if(tokenizer_peek(t) == '\n') continue;
}
need_arg = 0;
emit_token(argvalues[curr_arg].f, &tok, t->buf);
}
}
for(i=0; i < num_args; i++) {
argvalues[i].f = freopen_r(argvalues[i].f, &argvalues[i].buf, &argvalues[i].len);
tokenizer_from_file(&argvalues[i].t, argvalues[i].f);
#ifdef DEBUG
dprintf(2, "macro argument %i: %s\n", (int) i, argvalues[i].buf);
#endif
}
if(is_define) {
if(get_macro(cpp, argvalues[0].buf))
emit(out, "1");
else
emit(out, "0");
}
if(!m->str_contents) goto cleanup;
struct FILE_container cwae = {0}; /* contents_with_args_expanded */
cwae.f = open_memstream(&cwae.buf, &cwae.len);
FILE* output = cwae.f;
struct tokenizer t2;
tokenizer_from_file(&t2, m->str_contents);
int hash_count = 0;
int ws_count = 0;
while(1) {
int ret;
ret = tokenizer_next(&t2, &tok);
if(!ret) return 0;
if(tok.type == TT_EOF) break;
if(tok.type == TT_IDENTIFIER) {
flush_whitespace(output, &ws_count);
char *id = t2.buf;
if(MACRO_VARIADIC(m) && !strcmp(t2.buf, "__VA_ARGS__")) {
id = "...";
}
size_t arg_nr = macro_arglist_pos(m, id);
if(arg_nr != (size_t) -1) {
tokenizer_rewind(&argvalues[arg_nr].t);
if(hash_count == 1) ret = stringify(cpp, &argvalues[arg_nr].t, output);
else while(1) {
ret = tokenizer_next(&argvalues[arg_nr].t, &tok);
if(!ret) return ret;
if(tok.type == TT_EOF) break;
emit_token(output, &tok, argvalues[arg_nr].t.buf);
}
hash_count = 0;
} else {
if(hash_count == 1) {
hash_err:
error("'#' is not followed by macro parameter", &t2, &tok);
return 0;
}
emit_token(output, &tok, t2.buf);
}
} else if(is_char(&tok, '#')) {
if(hash_count) {
goto hash_err;
}
while(1) {
++hash_count;
/* in a real cpp we'd need to look for '\\' first */
while(tokenizer_peek(&t2) == '\n') {
x_tokenizer_next(&t2, &tok);
}
if(tokenizer_peek(&t2) == '#') x_tokenizer_next(&t2, &tok);
else break;
}
if(hash_count == 1) flush_whitespace(output, &ws_count);
else if(hash_count > 2) {
error("only two '#' characters allowed for macro expansion", &t2, &tok);
return 0;
}
if(hash_count == 2)
ret = tokenizer_skip_chars(&t2, " \t\n", &ws_count);
else
ret = tokenizer_skip_chars(&t2, " \t", &ws_count);
if(!ret) return ret;
ws_count = 0;
} else if(is_whitespace_token(&tok)) {
ws_count++;
} else {
if(hash_count == 1) goto hash_err;
flush_whitespace(output, &ws_count);
emit_token(output, &tok, t2.buf);
}
}
flush_whitespace(output, &ws_count);
/* we need to expand macros after the macro arguments have been inserted */
if(1) {
cwae.f = freopen_r(cwae.f, &cwae.buf, &cwae.len);
#ifdef DEBUG
dprintf(2, "contents with args expanded: %s\n", cwae.buf);
#endif
tokenizer_from_file(&cwae.t, cwae.f);
size_t mac_cnt = 0;
while(1) {
int ret = tokenizer_next(&cwae.t, &tok);
if(!ret) return ret;
if(tok.type == TT_EOF) break;
if(tok.type == TT_IDENTIFIER && get_macro(cpp, cwae.t.buf))
++mac_cnt;
}
tokenizer_rewind(&cwae.t);
struct macro_info *mcs = calloc(mac_cnt, sizeof(struct macro_info));
{
size_t mac_iter = 0;
get_macro_info(cpp, &cwae.t, mcs, &mac_iter, 0, 0, "null", visited, rec_level);
/* some of the macros might not expand at this stage (no parentheses following) */
while(mac_cnt && mcs[mac_cnt-1].name == 0)
--mac_cnt;
}
size_t i; int depth = 0;
for(i = 0; i < mac_cnt; ++i) {
if(mcs[i].nest > depth) depth = mcs[i].nest;
}
while(depth > -1) {
for(i = 0; i < mac_cnt; ++i) if(mcs[i].nest == depth) {
struct macro_info *mi = &mcs[i];
tokenizer_rewind(&cwae.t);
size_t j;
struct token utok;
for(j = 0; j < mi->first+1; ++j)
tokenizer_next(&cwae.t, &utok);
struct FILE_container t2 = {0}, tmp = {0};
t2.f = open_memstream(&t2.buf, &t2.len);
if(!expand_macro(cpp, &cwae.t, t2.f, mi->name, rec_level+1, visited))
return 0;
t2.f = freopen_r(t2.f, &t2.buf, &t2.len);
tokenizer_from_file(&t2.t, t2.f);
/* manipulating the stream in case more stuff has been consumed */
off_t cwae_pos = tokenizer_ftello(&cwae.t);
tokenizer_rewind(&cwae.t);
#ifdef DEBUG
dprintf(2, "merging %s with %s\n", cwae.buf, t2.buf);
#endif
int diff = mem_tokenizers_join(&cwae, &t2, &tmp, mi->first, cwae_pos);
free_file_container(&cwae);
free_file_container(&t2);
cwae = tmp;
#ifdef DEBUG
dprintf(2, "result: %s\n", cwae.buf);
#endif
if(diff == 0) continue;
for(j = 0; j < mac_cnt; ++j) {
if(j == i) continue;
struct macro_info *mi2 = &mcs[j];
/* modified element mi can be either inside, after or before
another macro. the after case doesn't affect us. */
if(mi->first >= mi2->first && mi->last <= mi2->last) {
/* inside m2 */
mi2->last += diff;
} else if (mi->first < mi2->first) {
/* before m2 */
mi2->first += diff;
mi2->last += diff;
}
}
}
--depth;
}
tokenizer_rewind(&cwae.t);
while(1) {
struct macro *ma;
tokenizer_next(&cwae.t, &tok);
if(tok.type == TT_EOF) break;
if(tok.type == TT_IDENTIFIER && tokenizer_peek(&cwae.t) == EOF &&
(ma = get_macro(cpp, cwae.t.buf)) && FUNCTIONLIKE(ma) && tchain_parens_follows(cpp, rec_level) != -1
) {
int ret = expand_macro(cpp, &cwae.t, out, cwae.t.buf, rec_level+1, visited);
if(!ret) return ret;
} else
emit_token(out, &tok, cwae.t.buf);
}
free(mcs);
}
free_file_container(&cwae);
cleanup:
for(i=0; i < num_args; i++) {
fclose(argvalues[i].f);
free(argvalues[i].buf);
}
free(argvalues);
return 1;
}
#define TT_LAND (TT_CUSTOM+0)
#define TT_LOR (TT_CUSTOM+1)
#define TT_LTE (TT_CUSTOM+2)
#define TT_GTE (TT_CUSTOM+3)
#define TT_SHL (TT_CUSTOM+4)
#define TT_SHR (TT_CUSTOM+5)
#define TT_EQ (TT_CUSTOM+6)
#define TT_NEQ (TT_CUSTOM+7)
#define TT_LT (TT_CUSTOM+8)
#define TT_GT (TT_CUSTOM+9)
#define TT_BAND (TT_CUSTOM+10)
#define TT_BOR (TT_CUSTOM+11)
#define TT_XOR (TT_CUSTOM+12)
#define TT_NEG (TT_CUSTOM+13)
#define TT_PLUS (TT_CUSTOM+14)
#define TT_MINUS (TT_CUSTOM+15)
#define TT_MUL (TT_CUSTOM+16)
#define TT_DIV (TT_CUSTOM+17)
#define TT_MOD (TT_CUSTOM+18)
#define TT_LPAREN (TT_CUSTOM+19)
#define TT_RPAREN (TT_CUSTOM+20)
#define TT_LNOT (TT_CUSTOM+21)
#define TTINT(X) ((X)-TT_CUSTOM)
#define TTENT(X, Y) [TTINT(X)] = Y
static int bp(int tokentype) {
static const int bplist[] = {
TTENT(TT_LOR, 1 << 4),
TTENT(TT_LAND, 1 << 5),
TTENT(TT_BOR, 1 << 6),
TTENT(TT_XOR, 1 << 7),
TTENT(TT_BAND, 1 << 8),
TTENT(TT_EQ, 1 << 9),
TTENT(TT_NEQ, 1 << 9),
TTENT(TT_LTE, 1 << 10),
TTENT(TT_GTE, 1 << 10),
TTENT(TT_LT, 1 << 10),
TTENT(TT_GT, 1 << 10),
TTENT(TT_SHL, 1 << 11),
TTENT(TT_SHR, 1 << 11),
TTENT(TT_PLUS, 1 << 12),
TTENT(TT_MINUS, 1 << 12),
TTENT(TT_MUL, 1 << 13),
TTENT(TT_DIV, 1 << 13),
TTENT(TT_MOD, 1 << 13),
TTENT(TT_NEG, 1 << 14),
TTENT(TT_LNOT, 1 << 14),
TTENT(TT_LPAREN, 1 << 15),
// TTENT(TT_RPAREN, 1 << 15),
// TTENT(TT_LPAREN, 0),
TTENT(TT_RPAREN, 0),
};
if(TTINT(tokentype) < sizeof(bplist)/sizeof(bplist[0])) return bplist[TTINT(tokentype)];
return 0;
}
static int expr(struct tokenizer *t, int rbp, int *err);
static int charlit_to_int(const char *lit) {
if(lit[1] == '\\') switch(lit[2]) {
case '0': return 0;
case 'n': return 10;
case 't': return 9;
case 'r': return 13;
case 'x': return strtol(lit+3, NULL, 16);
default: return lit[2];
}
return lit[1];
}
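/* charlit_to_int examples (input is the raw scanned literal, escapes still
   textual): 'a' -> 97, '\n' -> 10, '\x41' -> 65. */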
static int nud(struct tokenizer *t, struct token *tok, int *err) {
switch((unsigned) tok->type) {
case TT_IDENTIFIER: return 0;
case TT_WIDECHAR_LIT:
case TT_SQSTRING_LIT: return charlit_to_int(t->buf);
case TT_HEX_INT_LIT:
case TT_OCT_INT_LIT:
case TT_DEC_INT_LIT:
return strtol(t->buf, NULL, 0);
case TT_NEG: return ~ expr(t, bp(tok->type), err);
case TT_PLUS: return expr(t, bp(tok->type), err);
case TT_MINUS: return - expr(t, bp(tok->type), err);
case TT_LNOT: return !expr(t, bp(tok->type), err);
case TT_LPAREN: {
int inner = expr(t, 0, err);
if(0!=expect(t, TT_RPAREN, (const char*[]){")", 0}, tok)) {
error("missing ')'", t, tok);
return 0;
}
return inner;
}
case TT_FLOAT_LIT:
error("floating constant in preprocessor expression", t, tok);
*err = 1;
return 0;
case TT_RPAREN:
default:
error("unexpected token", t, tok);
*err = 1;
return 0;
}
}
static int led(struct tokenizer *t, int left, struct token *tok, int *err) {
int right;
switch((unsigned) tok->type) {
case TT_LAND:
case TT_LOR:
right = expr(t, bp(tok->type), err);
if(tok->type == TT_LAND) return left && right;
return left || right;
case TT_LTE: return left <= expr(t, bp(tok->type), err);
case TT_GTE: return left >= expr(t, bp(tok->type), err);
case TT_SHL: return left << expr(t, bp(tok->type), err);
case TT_SHR: return left >> expr(t, bp(tok->type), err);
case TT_EQ: return left == expr(t, bp(tok->type), err);
case TT_NEQ: return left != expr(t, bp(tok->type), err);
case TT_LT: return left < expr(t, bp(tok->type), err);
case TT_GT: return left > expr(t, bp(tok->type), err);
case TT_BAND: return left & expr(t, bp(tok->type), err);
case TT_BOR: return left | expr(t, bp(tok->type), err);
case TT_XOR: return left ^ expr(t, bp(tok->type), err);
case TT_PLUS: return left + expr(t, bp(tok->type), err);
case TT_MINUS:return left - expr(t, bp(tok->type), err);
case TT_MUL: return left * expr(t, bp(tok->type), err);
case TT_DIV:
case TT_MOD:
right = expr(t, bp(tok->type), err);
if(right == 0) {
error("eval: div by zero", t, tok);
*err = 1;
}
else if(tok->type == TT_DIV) return left / right;
else if(tok->type == TT_MOD) return left % right;
return 0;
default:
error("eval: unexpect token", t, tok);
*err = 1;
return 0;
}
}
static int tokenizer_peek_next_non_ws(struct tokenizer *t, struct token *tok)
{
int ret;
while(1) {
ret = tokenizer_peek_token(t, tok);
if(is_whitespace_token(tok))
x_tokenizer_next(t, tok);
else break;
}
return ret;
}
static int expr(struct tokenizer *t, int rbp, int*err) {
struct token tok;
int ret = skip_next_and_ws(t, &tok);
if(tok.type == TT_EOF) return 0;
int left = nud(t, &tok, err);
while(1) {
ret = tokenizer_peek_next_non_ws(t, &tok);
if(bp(tok.type) <= rbp) break;
ret = tokenizer_next(t, &tok);
if(tok.type == TT_EOF) break;
left = led(t, left, &tok, err);
}
(void) ret;
return left;
}
static int do_eval(struct tokenizer *t, int *result) {
tokenizer_register_custom_token(t, TT_LAND, "&&");
tokenizer_register_custom_token(t, TT_LOR, "||");
tokenizer_register_custom_token(t, TT_LTE, "<=");
tokenizer_register_custom_token(t, TT_GTE, ">=");
tokenizer_register_custom_token(t, TT_SHL, "<<");
tokenizer_register_custom_token(t, TT_SHR, ">>");
tokenizer_register_custom_token(t, TT_EQ, "==");
tokenizer_register_custom_token(t, TT_NEQ, "!=");
tokenizer_register_custom_token(t, TT_LT, "<");
tokenizer_register_custom_token(t, TT_GT, ">");
tokenizer_register_custom_token(t, TT_BAND, "&");
tokenizer_register_custom_token(t, TT_BOR, "|");
tokenizer_register_custom_token(t, TT_XOR, "^");
tokenizer_register_custom_token(t, TT_NEG, "~");
tokenizer_register_custom_token(t, TT_PLUS, "+");
tokenizer_register_custom_token(t, TT_MINUS, "-");
tokenizer_register_custom_token(t, TT_MUL, "*");
tokenizer_register_custom_token(t, TT_DIV, "/");
tokenizer_register_custom_token(t, TT_MOD, "%");
tokenizer_register_custom_token(t, TT_LPAREN, "(");
tokenizer_register_custom_token(t, TT_RPAREN, ")");
tokenizer_register_custom_token(t, TT_LNOT, "!");
int err = 0;
*result = expr(t, 0, &err);
#ifdef DEBUG
dprintf(2, "eval result: %d\n", *result);
#endif
return !err;
}
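/* do_eval example (sketch): evaluating "1 + 2 * 3 == 7" sets *result to 1,
   and "(1 << 4) | 2" sets it to 18, per the bp() precedence table above. */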
static int evaluate_condition(struct cpp *cpp, struct tokenizer *t, int *result, char *visited[]) {
int ret, backslash_seen = 0;
struct token curr;
char *bufp;
size_t size;
int tflags = tokenizer_get_flags(t);
tokenizer_set_flags(t, tflags | TF_PARSE_WIDE_STRINGS);
ret = tokenizer_next(t, &curr);
if(!ret) return ret;
if(!is_whitespace_token(&curr)) {
error("expected whitespace after if/elif", t, &curr);
return 0;
}
FILE *f = open_memstream(&bufp, &size);
while(1) {
ret = tokenizer_next(t, &curr);
if(!ret) return ret;
if(curr.type == TT_IDENTIFIER) {
if(!expand_macro(cpp, t, f, t->buf, -1, visited)) return 0;
} else if(curr.type == TT_SEP) {
if(curr.value == '\\')
backslash_seen = 1;
else {
if(curr.value == '\n') {
if(!backslash_seen) break;
} else {
emit_token(f, &curr, t->buf);
}
backslash_seen = 0;
}
} else {
emit_token(f, &curr, t->buf);
}
}
f = freopen_r(f, &bufp, &size);
if(!f || size == 0) {
error("#(el)if with no expression", t, &curr);
return 0;
}
#ifdef DEBUG
dprintf(2, "evaluating condition %s\n", bufp);
#endif
struct tokenizer t2;
tokenizer_from_file(&t2, f);
ret = do_eval(&t2, result);
fclose(f);
free(bufp);
tokenizer_set_flags(t, tflags);
return ret;
}
static void free_visited(char *visited[]) {
size_t i;
for(i=0; i< MAX_RECURSION; i++)
if(visited[i]) free(visited[i]);
}
int parse_file(struct cpp *cpp, FILE *f, const char *fn, FILE *out) {
struct tokenizer t;
struct token curr;
tokenizer_init(&t, f, TF_PARSE_STRINGS);
tokenizer_set_filename(&t, fn);
tokenizer_register_marker(&t, MT_MULTILINE_COMMENT_START, "/*"); /**/
tokenizer_register_marker(&t, MT_MULTILINE_COMMENT_END, "*/");
tokenizer_register_marker(&t, MT_SINGLELINE_COMMENT_START, "//");
int ret, newline=1, ws_count = 0;
int if_level = 0, if_level_active = 0, if_level_satisfied = 0;
#define all_levels_active() (if_level_active == if_level)
#define prev_level_active() (if_level_active == if_level-1)
#define set_level(X, V) do { \
if(if_level_active > X) if_level_active = X; \
if(if_level_satisfied > X) if_level_satisfied = X; \
if(V != -1) { \
if(V) if_level_active = X; \
else if(if_level_active == X) if_level_active = X-1; \
if(V && if_level_active == X) if_level_satisfied = X; \
} \
if_level = X; \
} while(0)
#define skip_conditional_block (if_level > if_level_active)
static const char* directives[] = {"include", "error", "warning", "define", "undef", "if", "elif", "else", "ifdef", "ifndef", "endif", "line", "pragma", 0};
while((ret = tokenizer_next(&t, &curr)) && curr.type != TT_EOF) {
newline = curr.column == 0;
if(newline) {
ret = eat_whitespace(&t, &curr, &ws_count);
if(!ret) return ret;
}
if(curr.type == TT_EOF) break;
if(skip_conditional_block && !(newline && is_char(&curr, '#'))) continue;
if(is_char(&curr, '#')) {
if(!newline) {
error("stray #", &t, &curr);
return 0;
}
int index = expect(&t, TT_IDENTIFIER, directives, &curr);
if(index == -1) {
if(skip_conditional_block) continue;
error("invalid preprocessing directive", &t, &curr);
return 0;
}
if(skip_conditional_block) switch(index) {
case 0: case 1: case 2: case 3: case 4:
case 11: case 12:
continue;
default: break;
}
switch(index) {
case 0:
ret = include_file(cpp, &t, out);
if(!ret) return ret;
break;
case 1:
ret = emit_error_or_warning(&t, 1);
if(!ret) return ret;
break;
case 2:
ret = emit_error_or_warning(&t, 0);
if(!ret) return ret;
break;
case 3:
ret = parse_macro(cpp, &t);
if(!ret) return ret;
break;
case 4:
if(!skip_next_and_ws(&t, &curr)) return 0;
if(curr.type != TT_IDENTIFIER) {
error("expected identifier", &t, &curr);
return 0;
}
undef_macro(cpp, t.buf);
break;
case 5: // if
if(all_levels_active()) {
char* visited[MAX_RECURSION] = {0};
if(!evaluate_condition(cpp, &t, &ret, visited)) return 0;
free_visited(visited);
set_level(if_level + 1, ret);
} else {
set_level(if_level + 1, 0);
}
break;
case 6: // elif
if(prev_level_active() && if_level_satisfied < if_level) {
char* visited[MAX_RECURSION] = {0};
if(!evaluate_condition(cpp, &t, &ret, visited)) return 0;
free_visited(visited);
if(ret) {
if_level_active = if_level;
if_level_satisfied = if_level;
}
} else if(if_level_active == if_level) {
--if_level_active;
}
break;
case 7: // else
if(prev_level_active() && if_level_satisfied < if_level) {
if(1) {
if_level_active = if_level;
if_level_satisfied = if_level;
}
} else if(if_level_active == if_level) {
--if_level_active;
}
break;
case 8: // ifdef
case 9: // ifndef
if(!skip_next_and_ws(&t, &curr) || curr.type == TT_EOF) return 0;
ret = !!get_macro(cpp, t.buf);
if(index == 9) ret = !ret;
if(all_levels_active()) {
set_level(if_level + 1, ret);
} else {
set_level(if_level + 1, 0);
}
break;
case 10: // endif
set_level(if_level-1, -1);
break;
case 11: // line
ret = tokenizer_read_until(&t, "\n", 1);
if(!ret) {
error("unknown", &t, &curr);
return 0;
}
break;
case 12: // pragma
emit(out, "#pragma");
while((ret = x_tokenizer_next(&t, &curr)) && curr.type != TT_EOF) {
emit_token(out, &curr, t.buf);
if(is_char(&curr, '\n')) break;
}
if(!ret) return ret;
break;
default:
break;
}
continue;
} else {
while(ws_count) {
emit(out, " ");
--ws_count;
}
}
#ifdef DEBUG
dprintf(2, "(stdin:%u,%u) ", curr.line, curr.column);
if(curr.type == TT_SEP)
dprintf(2, "separator: %c\n", curr.value == '\n'? ' ' : curr.value);
else
dprintf(2, "%s: %s\n", tokentype_to_str(curr.type), t.buf);
#endif
if(curr.type == TT_IDENTIFIER) {
char* visited[MAX_RECURSION] = {0};
if(!expand_macro(cpp, &t, out, t.buf, 0, visited))
return 0;
free_visited(visited);
} else {
emit_token(out, &curr, t.buf);
}
}
if(if_level) {
error("unterminated #if", &t, &curr);
return 0;
}
return 1;
}
struct cpp * cpp_new(void) {
struct cpp* ret = calloc(1, sizeof(struct cpp));
if(!ret) return ret;
tglist_init(&ret->includedirs);
cpp_add_includedir(ret, ".");
ret->macros = hbmap_new(strptrcmp, string_hash, 128);
struct macro m = {.num_args = 1};
add_macro(ret, strdup("defined"), &m);
m.num_args = MACRO_FLAG_OBJECTLIKE;
add_macro(ret, strdup("__FILE__"), &m);
add_macro(ret, strdup("__LINE__"), &m);
return ret;
}
void cpp_free(struct cpp*cpp) {
free_macros(cpp);
tglist_free_values(&cpp->includedirs);
tglist_free_items(&cpp->includedirs);
free(cpp); /* the cpp object itself was allocated in cpp_new */
}
void cpp_add_includedir(struct cpp *cpp, const char* includedir) {
tglist_add(&cpp->includedirs, strdup(includedir));
}
int cpp_add_define(struct cpp *cpp, const char *mdecl) {
struct FILE_container tmp = {0};
tmp.f = open_memstream(&tmp.buf, &tmp.len);
fprintf(tmp.f, "%s\n", mdecl);
tmp.f = freopen_r(tmp.f, &tmp.buf, &tmp.len);
tokenizer_from_file(&tmp.t, tmp.f);
int ret = parse_macro(cpp, &tmp.t);
free_file_container(&tmp);
return ret;
}
int cpp_run(struct cpp *cpp, FILE* in, FILE* out, const char* inname) {
return parse_file(cpp, in, inname, out);
}
| rofl0r/tinycpp |
<|start_filename|>src/main/java/com/mojang/datafixers/OptionalDynamic.java<|end_filename|>
package com.mojang.datafixers;
import com.mojang.datafixers.types.DynamicOps;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;
@SuppressWarnings("unused")
public final class OptionalDynamic<T> extends DynamicLike<T> {
private final Optional<Dynamic<T>> delegate;
public OptionalDynamic(final DynamicOps<T> ops, final Optional<Dynamic<T>> delegate) {
super(ops);
this.delegate = delegate;
}
public Optional<Dynamic<T>> get() {
return delegate;
}
public <U> Optional<U> map(Function<? super Dynamic<T>, ? extends U> mapper) {
return delegate.map(mapper);
}
public <U> Optional<U> flatMap(Function<? super Dynamic<T>, Optional<U>> mapper) {
return delegate.flatMap(mapper);
}
@Override
public Optional<Number> asNumber() {
return flatMap(DynamicLike::asNumber);
}
@Override
public Optional<String> asString() {
return flatMap(DynamicLike::asString);
}
@Override
public Optional<Stream<Dynamic<T>>> asStreamOpt() {
return flatMap(DynamicLike::asStreamOpt);
}
@Override
public Optional<ByteBuffer> asByteBufferOpt() {
return flatMap(DynamicLike::asByteBufferOpt);
}
@Override
public Optional<IntStream> asIntStreamOpt() {
return flatMap(DynamicLike::asIntStreamOpt);
}
@Override
public Optional<LongStream> asLongStreamOpt() {
return flatMap(DynamicLike::asLongStreamOpt);
}
@Override
public OptionalDynamic<T> get(final String key) {
return new OptionalDynamic<>(ops, flatMap(k -> k.get(key).get()));
}
@Override
public Optional<T> getGeneric(final T key) {
return flatMap(v -> v.getGeneric(key));
}
@Override
public Optional<T> getElement(final String key) {
return flatMap(v -> v.getElement(key));
}
@Override
public Optional<T> getElementGeneric(final T key) {
return flatMap(v -> v.getElementGeneric(key));
}
@Override
public <U> Optional<List<U>> asListOpt(final Function<Dynamic<T>, U> deserializer) {
return flatMap(t -> t.asListOpt(deserializer));
}
@Override
public <K, V> Optional<Map<K, V>> asMapOpt(final Function<Dynamic<T>, K> keyDeserializer, final Function<Dynamic<T>, V> valueDeserializer) {
return flatMap(input -> input.asMapOpt(keyDeserializer, valueDeserializer));
}
public Dynamic<T> orElseEmptyMap() {
return delegate.orElseGet(this::emptyMap);
}
public Dynamic<T> orElseEmptyList() {
return delegate.orElseGet(this::emptyList);
}
}
<|start_filename|>src/main/java/com/mojang/datafixers/DynamicLike.java<|end_filename|>
package com.mojang.datafixers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.mojang.datafixers.types.DynamicOps;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;
@SuppressWarnings("unused")
public abstract class DynamicLike<T> {
protected final DynamicOps<T> ops;
public DynamicLike(final DynamicOps<T> ops) {
this.ops = ops;
}
public DynamicOps<T> getOps() {
return ops;
}
public abstract Optional<Number> asNumber();
public abstract Optional<String> asString();
public abstract Optional<Stream<Dynamic<T>>> asStreamOpt();
public abstract Optional<ByteBuffer> asByteBufferOpt();
public abstract Optional<IntStream> asIntStreamOpt();
public abstract Optional<LongStream> asLongStreamOpt();
public abstract OptionalDynamic<T> get(String key);
public abstract Optional<T> getGeneric(T key);
public abstract Optional<T> getElement(String key);
public abstract Optional<T> getElementGeneric(T key);
public abstract <U> Optional<List<U>> asListOpt(Function<Dynamic<T>, U> deserializer);
public abstract <K, V> Optional<Map<K, V>> asMapOpt(Function<Dynamic<T>, K> keyDeserializer, Function<Dynamic<T>, V> valueDeserializer);
public Number asNumber(final Number defaultValue) {
return asNumber().orElse(defaultValue);
}
public int asInt(final int defaultValue) {
return asNumber(defaultValue).intValue();
}
public long asLong(final long defaultValue) {
return asNumber(defaultValue).longValue();
}
public float asFloat(final float defaultValue) {
return asNumber(defaultValue).floatValue();
}
public double asDouble(final double defaultValue) {
return asNumber(defaultValue).doubleValue();
}
public byte asByte(final byte defaultValue) {
return asNumber(defaultValue).byteValue();
}
public short asShort(final short defaultValue) {
return asNumber(defaultValue).shortValue();
}
public boolean asBoolean(final boolean defaultValue) {
return asNumber(defaultValue ? 1 : 0).intValue() != 0;
}
public String asString(final String defaultValue) {
return asString().orElse(defaultValue);
}
public Stream<Dynamic<T>> asStream() {
return asStreamOpt().orElseGet(Stream::empty);
}
public ByteBuffer asByteBuffer() {
return asByteBufferOpt().orElseGet(() -> ByteBuffer.wrap(new byte[0]));
}
public IntStream asIntStream() {
return asIntStreamOpt().orElseGet(IntStream::empty);
}
public LongStream asLongStream() {
return asLongStreamOpt().orElseGet(LongStream::empty);
}
public <U> List<U> asList(final Function<Dynamic<T>, U> deserializer) {
return asListOpt(deserializer).orElseGet(ImmutableList::of);
}
public <K, V> Map<K, V> asMap(final Function<Dynamic<T>, K> keyDeserializer, final Function<Dynamic<T>, V> valueDeserializer) {
return asMapOpt(keyDeserializer, valueDeserializer).orElseGet(ImmutableMap::of);
}
public T getElement(final String key, final T defaultValue) {
return getElement(key).orElse(defaultValue);
}
public T getElementGeneric(final T key, final T defaultValue) {
return getElementGeneric(key).orElse(defaultValue);
}
public Dynamic<T> emptyList() {
return new Dynamic<>(ops, ops.emptyList());
}
public Dynamic<T> emptyMap() {
return new Dynamic<>(ops, ops.emptyMap());
}
public Dynamic<T> createNumeric(final Number i) {
return new Dynamic<>(ops, ops.createNumeric(i));
}
public Dynamic<T> createByte(final byte value) {
return new Dynamic<>(ops, ops.createByte(value));
}
public Dynamic<T> createShort(final short value) {
return new Dynamic<>(ops, ops.createShort(value));
}
public Dynamic<T> createInt(final int value) {
return new Dynamic<>(ops, ops.createInt(value));
}
public Dynamic<T> createLong(final long value) {
return new Dynamic<>(ops, ops.createLong(value));
}
public Dynamic<T> createFloat(final float value) {
return new Dynamic<>(ops, ops.createFloat(value));
}
public Dynamic<T> createDouble(final double value) {
return new Dynamic<>(ops, ops.createDouble(value));
}
public Dynamic<T> createBoolean(final boolean value) {
return new Dynamic<>(ops, ops.createBoolean(value));
}
public Dynamic<T> createString(final String value) {
return new Dynamic<>(ops, ops.createString(value));
}
public Dynamic<T> createList(final Stream<? extends Dynamic<?>> input) {
return new Dynamic<>(ops, ops.createList(input.map(element -> element.cast(ops))));
}
public Dynamic<T> createMap(final Map<? extends Dynamic<?>, ? extends Dynamic<?>> map) {
final ImmutableMap.Builder<T, T> builder = ImmutableMap.builder();
for (final Map.Entry<? extends Dynamic<?>, ? extends Dynamic<?>> entry : map.entrySet()) {
builder.put(entry.getKey().cast(ops), entry.getValue().cast(ops));
}
return new Dynamic<>(ops, ops.createMap(builder.build()));
}
public Dynamic<?> createByteList(final ByteBuffer input) {
return new Dynamic<>(ops, ops.createByteList(input));
}
public Dynamic<?> createIntList(final IntStream input) {
return new Dynamic<>(ops, ops.createIntList(input));
}
public Dynamic<?> createLongList(final LongStream input) {
return new Dynamic<>(ops, ops.createLongList(input));
}
}
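// Usage sketch (illustrative — assumes a Dynamic<T> built over some
// DynamicOps<T> implementation; the keys and defaults below are examples,
// not part of this API):
//
//   Dynamic<T> dynamic = new Dynamic<>(ops, serialized);
//   int version = dynamic.get("DataVersion").asInt(0);
//   String name = dynamic.get("name").asString("unknown");
//
// get(key) returns an OptionalDynamic, so missing keys fall back to the
// supplied defaults instead of throwing.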
| ch99q/DataFixerUpper |
<|start_filename|>docs/src/components/Hero/styles.module.css<|end_filename|>
/* stylelint-disable docusaurus/copyright-header */
/**
* CSS files with the .module.css suffix will be treated as CSS modules
* and scoped locally.
*/
.heroBanner {
background-color: var(--almost-black);
flex: 1;
padding: 2rem 0;
position: relative;
}
.heroSubtitle {
color: #fff;
line-height: 1.25;
}
.heroBanner :global(.row) {
align-items: flex-end;
}
.buttons {
display: flex;
flex-direction: column;
}
.exampleColumn {
margin-top: 2rem;
}
@media screen and (min-width: 480px) {
.buttons {
flex-direction: row;
}
.secondaryButton {
margin-left: 1rem;
}
}
@media screen and (min-width: 997px) {
.heroBanner {
padding: 8rem;
}
.exampleColumn {
margin-top: 0;
}
}
<|start_filename|>docs/src/css/custom.css<|end_filename|>
/* stylelint-disable docusaurus/copyright-header */
/**
* Any CSS included here will be global. The classic template
* bundles Infima by default. Infima is a CSS framework designed to
* work well for content-centric websites.
*/
/* You can override the default Infima variables here. */
:root {
--rich-black: #070a0d;
--almost-black: #0d1117;
--primary: #08ffc8;
--ifm-color-primary: var(--primary);
--ifm-color-primary-dark: #00f5c0;
--ifm-color-primary-darker: #00e0b0;
--ifm-color-primary-darkest: #00cca0;
--ifm-color-primary-light: #1fffce;
--ifm-color-primary-lighter: #33ffd3;
--ifm-color-primary-lightest: #47ffd7;
--ifm-code-font-size: 95%;
--ifm-font-color-base: #0d1117;
--ifm-font-color-base-inverse: #fff;
--ifm-navbar-background-color: var(--almost-black);
--ifm-navbar-link-color: #fff;
--ifm-navbar-link-hover-color: var(--primary);
--ifm-link-color: var(--ifm-color-primary-darkest);
--ifm-link-hover-color: var(--ifm-color-primary-dark);
--ifm-menu-color-active: var(--ifm-color-primary-darkest);
}
html[data-theme="dark"] {
--ifm-font-color-base: #fff;
--ifm-font-color-base-inverse: #0d1117;
--ifm-navbar-link-hover-color: var(--primary);
--ifm-link-color: var(--ifm-color-primary);
--ifm-link-hover-color: var(--ifm-color-primary-dark);
--ifm-menu-color-active: var(--ifm-color-primary);
}
.docusaurus-highlight-code-line {
background-color: rgba(0, 0, 0, 0.1);
display: block;
margin: 0 calc(-1 * var(--ifm-pre-padding));
padding: 0 var(--ifm-pre-padding);
}
html[data-theme="dark"] .docusaurus-highlight-code-line {
background-color: rgba(0, 0, 0, 0.3);
}
.main-wrapper {
display: flex;
flex-direction: column;
}
.button {
color: var(--almost-black);
}
.button.button--secondary {
color: var(--almost-black);
}
.button--secondary {
--ifm-button-border-color: #fff;
}
.button--secondary:not(.button--outline) {
--ifm-button-background-color: #fff;
}
.navbar__logo {
height: 1.35rem;
}
.navbar__brand {
color: var(--primary);
}
.navbar__brand:hover {
color: #fff;
}
.navbar__toggle {
color: #fff;
}
.table-of-contents__link--active {
color: var(--ifm-color-primary-darkest);
}
html[data-theme="dark"] .table-of-contents__link--active {
color: var(--ifm-color-primary);
}
<|start_filename|>docs/docusaurus.config.js<|end_filename|>
const prismTheme = require("./src/prism-theme");
/** @type {import('@docusaurus/types').DocusaurusConfig} */
module.exports = {
title: "slippers",
tagline:
"Build reusable components in Django without writing a single line of Python.",
url: "https://mitchel.me",
baseUrl: "/slippers/",
onBrokenLinks: "throw",
onBrokenMarkdownLinks: "warn",
favicon: "img/favicon.ico",
organizationName: "mixxorz",
projectName: "slippers",
trailingSlash: true,
themeConfig: {
image: "https://repository-images.githubusercontent.com/384881226/b3f17d7e-ba55-4470-b1ea-4c65c4ab2cde",
navbar: {
title: "",
logo: {
alt: "slippers",
src: "img/slippers.svg",
},
items: [
{
href: "/docs/introduction/",
label: "Docs",
},
{
href: "https://github.com/mixxorz/slippers",
label: "GitHub",
position: "right",
},
],
},
footer: {
style: "light",
links: [
{
title: "Docs",
items: [
{
label: "Introduction",
to: "/docs/introduction",
},
{
label: "Installation",
to: "/docs/installation",
},
{
label: "Getting started",
to: "/docs/getting-started",
},
],
},
{
title: "More",
items: [
{
label: "GitHub",
href: "https://github.com/mixxorz/slippers",
},
{
label: "Twitter",
href: "https://twitter.com/mixxorz",
},
],
},
],
},
prism: {
theme: prismTheme,
darkTheme: prismTheme,
additionalLanguages: ["django", "twig"],
},
},
presets: [
[
"@docusaurus/preset-classic",
{
docs: {
sidebarPath: require.resolve("./sidebars.js"),
// Please change this to your repo.
editUrl:
"https://github.com/mixxorz/slippers/edit/main/docs/",
},
theme: {
customCss: require.resolve("./src/css/custom.css"),
},
},
],
],
};
| mixxorz/slippers |
<|start_filename|>public/plugins/popdown/lib/jquery.popdown.js<|end_filename|>
/*!
* Simple jQuery Popdown Plugin & Content Loader
*
* @author : http://twitter.com/SeanNieuwoudt
* @author : http://twitter.com/wixelhq
* @url : http://github.com/Wixel/jquery-popdown.git
* @copyright: 2015 Wixel
* @license : MIT license
* @version : 1.1
*/
;(function($){
/**
* Generate & display the popdown
*
* @param string uri (content to load)
* @param object options
* @return void
*/
$.fn.show_popdown = function(uri, options) {
// Remove previous containers if they exist
if($('#popdown-opacity').length > 0) {
$('#popdown-opacity').remove();
}
// Construct the background blend
opacity = $('<div />').attr('id', 'popdown-opacity').css({
position: 'absolute',
top : 0,
left : 0,
width : $(document).outerWidth(true),
height : $(document).outerHeight(true),
zIndex : 99998,
display : 'none'
});
// Construct the content container
container = $('<div class="popdown-loading" />').attr('id', 'popdown-dialog').css({
maxWidth : options.width,
height : options.height,
zIndex : 99999,
margin : '0 auto',
position: 'relative',
display : 'none'
});
// Let's add the opacity to the doc body
$('body').append(opacity);
// Fade in the background blend & add content container
$('#popdown-opacity').fadeIn(100).append(container);
// Fade in the container and load the data
$('#popdown-opacity').append(container).stop().animate({
opacity: 1.0
}, 100, function() {
$('#popdown-dialog').fadeIn(50, function(){
$.get(uri, function(resp) {
$('#popdown-dialog').html(resp).addClass('popdown-done').removeClass('popdown-loading');
$("html, body").animate({ scrollTop: 0 }, "fast");
});
});
});
};
/**
* Close the popdown and remove it from the DOM
*
* @return void
*/
$.fn.close_popdown = function() {
if($('#popdown-opacity').length > 0) {
$('#popdown-dialog').stop().animate({
opacity:0,
height:0
}, 200, function(){
$('#popdown-opacity').remove();
});
}
};
/**
* Initialize the popdown plugin
*
* @return void
*/
$.fn.popdown = function(options) {
var defaults = {
width :610,
height:'auto'
};
options = $.extend({}, defaults, options); // don't re-declare the parameter or mutate the shared defaults
// Re-size the opacity when the window is resized
$(window).resize(function() {
if($('#popdown-opacity').length > 0) {
$('#popdown-opacity').css({
width : $(document).outerWidth(),
height: $(document).outerHeight()
});
}
});
// Bind the document ESC key
$(document).keyup(function(e){
if(e.keyCode === 27) {
$.fn.close_popdown();
}
});
// General element to close the popdown
$(document).on('click', '.close-popdown', function(e){
if(!$(this).is('.close-popdown')) { // only prevent the default action when the clicked element itself lacks the close-popdown class
e.preventDefault();
}
$.fn.close_popdown();
});
// Close popdown when user clicks outside its container
$(document).click(function(event) {
if(!$(event.target).closest('#popdown-dialog').length) {
if($('#popdown-dialog').is(":visible")) {
$.fn.close_popdown();
}
}
});
// Bind to each matching element
return this.each(function() {
var self = $(this);
self.bind('click', function(e){
if(self.is('a')) {
e.preventDefault();
}
if($('#popdown-opacity').is(':visible')) {
$.fn.close_popdown();
} else {
if(self.data('uri')) {
$.fn.show_popdown(self.data('uri'), options);
} else if(self.attr('href')) {
$.fn.show_popdown(self.attr('href'), options);
} else {
alert("No popdown dialog set for this action.");
}
}
});
});
};
})(jQuery);
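// Usage sketch (selector, size, and URL are illustrative):
//
//   $('a.help-link').popdown({ width: 500 });
//
// The dialog loads the clicked element's data-uri attribute (or its href);
// give any element inside it the close-popdown class to dismiss the dialog.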
| binhpt2612/simple-blog-interview |
<|start_filename|>source/assets/javascripts/all.js<|end_filename|>
require('./_font-loader.js');
require('./_alert.js');
<|start_filename|>source/assets/javascripts/_alert.js<|end_filename|>
window.setTimeout(function() {
$(".alert-success:visible, .alert-danger:visible").fadeTo(500, 0).slideUp(500, function(){
$(this).remove();
});
}, 6000); | AdrianChingHC/adrianchinghc.github.io |
<|start_filename|>testapp/templates/delete_thread.html<|end_filename|>
{% extends "base.html" %}
{% import "macros.html" as macros %}
{% block title %}Delete Thread?{% endblock %}
{% block content %}
<h2>Delete Thread: {{ thread.subject }}</h2>
<p>
<small>
Started @ {{ macros.time_tag(thread.timestamp) }}<br>
{{ macros.backlink() }}<br>
{{ macros.backlink(url_for("comments", thread_uid=thread_uid), "thread: " + thread.subject) }}
</small>
</p>
<hr>
<form action="{{ url_for("delete_thread", thread_uid=thread_uid) }}" method="post">
<fieldset>
<legend>Confirm Deleting Thread</legend>
<p>
If you are sure you want to delete the "{{ thread.subject }}" thread, type "confirm" in the
input below<br>
<input autocomplete="off" autofocus name="confirm" required type="text"
size="40">
</p>
<p>
<button type="submit"><strong>Submit</strong></button>
</p>
</fieldset>
</form>
{% endblock %}
<|start_filename|>testapp/templates/error.html<|end_filename|>
{% extends "base.html" %}
{% block title %}{{ message }}{% endblock %}
{% block content %}
<h2>{{ message }}</h2>
<p>
<small>
<a href="{{ url_for("index") }}">« To index page</a><br>
<a href="javascript:history.back();">« To previous page</a>
</small>
</p>
{% if trace %}
<hr>
<h3>Traceback</h3>
<pre>{{ trace }}</pre>
{% endif %}
{% endblock %}
<|start_filename|>testapp/templates/macros.html<|end_filename|>
{% macro backlink(url="", title="") -%}
{% if not url and not title %}
<a href="{{ url_for("threads") }}">« Return to all threads</a>
{% else %}
<a href="{{ url }}">« Return to {{ title }}</a>
{% endif %}
{%- endmacro %}
{% macro time_tag(timestamp) -%}
{% set dt = fromtimestamp(float(timestamp)) %}
<time datetime="{{ dt.isoformat() }}">{{ dt|format("%H:%M, %b %d, %Y") }}</time>
{%- endmacro %}
<|start_filename|>testapp/templates/comments.html<|end_filename|>
{% extends "base.html" %}
{% import "macros.html" as macros %}
{% block title %}{{ thread.subject }}{% endblock %}
{% block content %}
<h2>{{ thread.subject }} ({{ thread.comments_counter }})</h2>
<p>
<small>
Started by <strong>{{ thread.author }}</strong> @ {{ macros.time_tag(thread.timestamp) }}.
{% if g.username == thread.author %}
<a href="{{ url_for("delete_thread", thread_uid=thread_uid) }}" style="color: darkred;">Delete</a>
{% endif %}
<br>
{{ macros.backlink() }}
</small>
</p>
<hr>
{% for comment_uid, comment in iteritems(comments) %}
<p id="{{ comment_uid }}">
<span style="white-space: pre;">{{ comment.text }}</span><br>
<small>
<strong>{{ comment.author }}</strong> @ {{ macros.time_tag(comment.timestamp) }}
</small>
</p>
{% endfor %}
<hr>
<form action="{{ url_for("comment", thread_uid=thread_uid) }}" method="post">
<fieldset>
<legend>Add New Comment</legend>
<p>
Logged in as <strong>{{ g.username }}</strong><br>
<small>
Not you? <a href="{{ url_for("quit") }}">Quit</a>
</small>
</p>
<p>
<textarea autofocus cols="80" id="text" name="text"
placeholder="Text..." required rows="5"></textarea>
</p>
<p>
<button type="submit"><strong>Submit</strong></button>
</p>
</fieldset>
</form>
{% endblock %}
<|start_filename|>testapp/Makefile<|end_filename|>
.PHONY: clean distclean install lint server shell test wipe
# Project and environment settings
APP = flask_redis
PROJECT = .
ENV ?= env
VENV = $(shell python -c 'import sys; print(int(hasattr(sys, "real_prefix")));')
ifeq ($(VENV),1)
PYTHON = python
TOX = tox
else
PYTHON = $(ENV)/bin/python
TOX = $(ENV)/bin/tox
endif
# Server settings
SERVER_HOST ?= 0.0.0.0
SERVER_PORT ?= 8300
# Test settings
ifneq ($(TOXENV),)
tox_args = -e $(TOXENV)
endif
clean:
find . -name "*.pyc" -delete
find . -type d -empty -delete
find . -name __pycache__ -type d -exec rm -rf {} 2> /dev/null +
distclean: clean
rm -rf ../build ../dist $(ENV)/
install: .install
.install: requirements.txt
bootstrapper -e $(ENV)/
touch $@
lint: .install
TOXENV=flake8 $(MAKE) test
server: .install
$(PYTHON) app.py $(SERVER_HOST):$(SERVER_PORT)
shell:
$(PYTHON) -m ipython
test: .install
$(TOX) $(tox_args) $(TOX_ARGS) -- $(TEST_ARGS)
wipe: .install
$(PYTHON) wipe.py
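# Example invocations (a sketch; the alternate port value is an illustrative
# assumption, the defaults and tox environment come from the settings above):
#   make server                    # serve on the default 0.0.0.0:8300
#   SERVER_PORT=9000 make server   # override the port via the environment
#   TOXENV=flake8 make test        # run a single tox environment, as `lint` does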
<|start_filename|>testapp/templates/index.html<|end_filename|>
{% extends "base.html" %}
{% block title %}Comments App{% endblock %}
{% block content %}
<form action="{{ url_for("index") }}" method="post">
<fieldset>
<legend>Select your username</legend>
<p>
<label for="username"><strong>Username</strong></label><br>
<input autocomplete="off" autofocus id="username" name="username"
required size="80"><br>
{% if error %}
<strong>Invalid value for username. Please try again.</strong><br>
{% endif %}
<small>Username should be at least 2 characters long.</small>
</p>
<p>
<button type="submit"><strong>Submit</strong></button>
</p>
</fieldset>
</form>
{% endblock %}
<|start_filename|>testapp/templates/base.html<|end_filename|>
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>{% block title %}{% endblock %} · Flask-And-Redis</title>
<style type="text/css">
body {
margin: 20px;
}
body, button, input, textarea {
color: #111;
font-family: "Dejavu Sans Mono", "Monaco", monospace;
}
fieldset legend {
font-weight: bold;
padding: 0 10px;
}
footer {
color: #999;
font-size: small;
margin-top: 32px;
}
footer a {
color: #999;
}
footer a:focus, footer a:hover {
color: #111;
}
footer p {
margin: 0;
}
hr {
margin: 32px 0;
}
ul.messages {
color: #555;
font-size: small;
}
.github-ribbon img {
border: 0;
position: absolute;
right: 0;
top: 0;
}
</style>
{% block extra_head %}{% endblock %}
</head>
<body>
<header>
<h1>Comments App</h1>
<div class="github">
<a class="github-ribbon" href="https://github.com/playpauseandstop/Flask-And-Redis" title="Fork me on GitHub">
<img src="{{ url_for("static", filename="img/github.png") }}" alt="Fork me on GitHub">
</a>
</div>
</header>
{% with messages = get_flashed_messages(with_categories=True) %}
{% if messages %}
<ul class="messages">
{% for category, message in messages %}
<li class="{{ category }}">{{ message }}</li>
{% endfor %}
</ul>
{% endif %}
{% endwith %}
{% block content %}{% endblock %}
<footer>
<p>
This is test application for
<a href="https://github.com/playpauseandstop/Flask-And-Redis">Flask-And-Redis</a>
extension. Source code available on GitHub.
</p>
<p><small>All data wiped every 12 hours.</small></p>
</footer>
{% block extra_body %}{% endblock %}
</body>
</html>
<|start_filename|>testapp/static/css/screen.css<|end_filename|>
body, input, table, td, textarea, th {
color: #222;
font-family: "Droid Mono", "Menlo", monospace;
font-size: 12px;
line-height: 16px;
}
hr {
border-bottom: 1px solid #eee;
border-top: 1px solid #666;
margin: 2em 0;
}
#server-info table,
#server-test fieldset {
border: 2px solid #999;
padding: 4px 8px;
}
#server-info table {
width: 100%;
}
#server-info th {
font-size: 16px;
padding-top: 8px;
text-align: left;
}
#server-test input[type="submit"] {
border: 1px solid #ccc;
background-color: #eee;
cursor: pointer;
font-size: 14px;
font-weight: bold;
padding: 2px 12px;
}
#server-test input[type="submit"]:hover {
text-decoration: underline;
}
#server-test label {
font-size: 16px;
font-weight: bold;
}
.github-ribbon img {
border: 0;
position: absolute;
right: 0;
top: 0;
}
<|start_filename|>testapp/templates/threads.html<|end_filename|>
{% extends "base.html" %}
{% import "macros.html" as macros %}
{% block title %}Available Threads{% endblock %}
{% block content %}
{% if threads %}
<h2>Available Threads</h2>
<dl>
{% for thread_uid, thread in iteritems(threads) %}
<dt>
<strong>
<a href="{{ url_for("comments", thread_uid=thread_uid) }}">{{ thread.subject }}</a>
{% if thread.comments_counter %}({{ thread.comments_counter }}){% endif %}
</strong>
{% if thread.author == g.username %}
—
<small>
<a href="{{ url_for("delete_thread", thread_uid=thread_uid) }}" style="color: darkred;">Delete</a>
</small>
{% endif %}
</dt>
<dd>
<small>
Started by <strong>{{ thread.author }}</strong>
@ {{ macros.time_tag(thread.timestamp) }}{% if thread.last_comment %}.
Last comment by <strong>{{ thread.last_comment.author }}</strong>
@ {{ macros.time_tag(thread.last_comment.timestamp) }}{% endif %}
</small>
</dd>
{% endfor %}
</dl>
{% else %}
<h2>No Threads Available</h2>
<p>
Use the form below to start a new Thread.
</p>
{% endif %}
<hr>
<form action="{{ url_for("start_thread") }}" method="post">
<fieldset>
<legend>Start new Thread</legend>
<p>
Logged in as <strong>{{ g.username }}</strong><br>
<small>Not you? <a href="{{ url_for("quit") }}">Quit</a></small>
</p>
<p>
<label for="subject"><strong>Subject</strong></label><br>
<input autocomplete="off"{% if len(threads) < 5 %} autofocus{% endif %}
id="subject" name="subject" required size="80" type="text">
</p>
<p>
<label for="comment">Comment</label><br>
<textarea cols="80" id="comment" name="comment" rows="5"></textarea>
</p>
<p>
<button type="submit"><strong>Submit</strong></button>
</p>
</fieldset>
</form>
{% endblock %}
| liorsbg/Flask-And-Redis |
<|start_filename|>gravitee-apim-repository/gravitee-apim-repository-mongodb/src/main/java/io/gravitee/repository/mongodb/management/internal/application/ApplicationMongoRepositoryImpl.java<|end_filename|>
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.repository.mongodb.management.internal.application;
import static org.springframework.data.domain.Sort.Direction.ASC;
import static org.springframework.data.mongodb.core.query.Criteria.where;
import io.gravitee.common.data.domain.Page;
import io.gravitee.repository.management.api.search.ApplicationCriteria;
import io.gravitee.repository.management.api.search.Order;
import io.gravitee.repository.management.api.search.Pageable;
import io.gravitee.repository.management.api.search.Sortable;
import io.gravitee.repository.mongodb.management.internal.model.ApplicationMongo;
import io.gravitee.repository.mongodb.utils.FieldUtils;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
/**
* @author <NAME> (yann.tavernier at graviteesource.com)
* @author GraviteeSource Team
*/
public class ApplicationMongoRepositoryImpl implements ApplicationMongoRepositoryCustom {
@Autowired
private MongoTemplate mongoTemplate;
@Override
public Page<ApplicationMongo> search(final ApplicationCriteria criteria, final Pageable pageable) {
final Query query = new Query();
query.fields().exclude("background");
query.fields().exclude("picture");
if (criteria != null) {
if (criteria.getIds() != null && !criteria.getIds().isEmpty()) {
query.addCriteria(where("id").in(criteria.getIds()));
}
if (criteria.getEnvironmentIds() != null) {
query.addCriteria(where("environmentId").in(criteria.getEnvironmentIds()));
}
if (criteria.getName() != null && !criteria.getName().isEmpty()) {
query.addCriteria(where("name").regex(criteria.getName(), "i"));
}
if (criteria.getStatus() != null) {
query.addCriteria(where("status").is(criteria.getStatus()));
}
}
query.with(Sort.by(ASC, "name"));
long total = mongoTemplate.count(query, ApplicationMongo.class);
if (pageable != null) {
query.with(PageRequest.of(pageable.pageNumber(), pageable.pageSize()));
}
List<ApplicationMongo> apps = mongoTemplate.find(query, ApplicationMongo.class);
return new Page<>(apps, pageable != null ? pageable.pageNumber() : 0, pageable != null ? pageable.pageSize() : 0, total);
}
@Override
public Set<ApplicationMongo> findByIds(List<String> ids, Sortable sortable) {
Query query = new Query();
query.addCriteria(Criteria.where("id").in(ids));
if (sortable != null && StringUtils.isNotEmpty(sortable.field())) {
query.with(Sort.by(Order.DESC.equals(sortable.order()) ? Sort.Direction.DESC : ASC, FieldUtils.toCamelCase(sortable.field())));
}
List<ApplicationMongo> applications = mongoTemplate.find(query, ApplicationMongo.class);
return new LinkedHashSet<>(applications);
}
}
<|start_filename|>gravitee-apim-gateway/gravitee-apim-gateway-policy/src/main/java/io/gravitee/gateway/policy/impl/ConditionalExecutablePolicy.java<|end_filename|>
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.gateway.policy.impl;
import io.gravitee.common.http.HttpHeaders;
import io.gravitee.el.exceptions.ExpressionEvaluationException;
import io.gravitee.gateway.api.ExecutionContext;
import io.gravitee.gateway.api.buffer.Buffer;
import io.gravitee.gateway.api.stream.ReadWriteStream;
import io.gravitee.gateway.core.condition.ConditionEvaluator;
import io.gravitee.gateway.core.condition.ExpressionLanguageStringConditionEvaluator;
import io.gravitee.gateway.policy.PolicyException;
import io.gravitee.policy.api.PolicyChain;
import java.lang.reflect.Method;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author GraviteeSource Team
*/
public class ConditionalExecutablePolicy extends ExecutablePolicy {
public static final Logger LOGGER = LoggerFactory.getLogger(ConditionalExecutablePolicy.class);
private final ConditionEvaluator<String> conditionEvaluator;
private final String condition;
public ConditionalExecutablePolicy(String id, Object policy, Method headMethod, Method streamMethod, String condition) {
super(id, policy, headMethod, streamMethod);
this.conditionEvaluator = new ExpressionLanguageStringConditionEvaluator();
this.condition = condition;
}
@Override
public void execute(PolicyChain chain, ExecutionContext context) throws PolicyException {
boolean isConditionTruthy = evaluateCondition(context);
if (isConditionTruthy) {
super.execute(chain, context);
} else {
chain.doNext(context.request(), context.response());
}
}
@Override
public ReadWriteStream<Buffer> stream(PolicyChain chain, ExecutionContext context) throws PolicyException {
boolean isConditionTruthy = evaluateCondition(context);
ReadWriteStream<Buffer> stream = null;
if (isConditionTruthy) {
stream = super.stream(chain, context);
}
return stream;
}
private boolean evaluateCondition(ExecutionContext context) throws PolicyException {
boolean isConditionTruthy;
try {
isConditionTruthy = conditionEvaluator.evaluate(context, condition);
} catch (RuntimeException e) {
// Catch all RuntimeExceptions, including those thrown by spring-expression, without adding a dependency on it
LOGGER.error("Condition evaluation fails for policy {}", this.id(), e);
throw new PolicyException("Request failed unintentionally", e);
}
return isConditionTruthy;
}
}
<|start_filename|>gravitee-apim-gateway/gravitee-apim-gateway-handlers/gravitee-apim-gateway-handlers-api/src/main/java/io/gravitee/gateway/handlers/api/ApiContextHandlerFactory.java<|end_filename|>
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.gateway.handlers.api;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.gravitee.gateway.api.endpoint.resolver.EndpointResolver;
import io.gravitee.gateway.connector.ConnectorRegistry;
import io.gravitee.gateway.core.classloader.DefaultClassLoader;
import io.gravitee.gateway.core.component.ComponentProvider;
import io.gravitee.gateway.core.component.CompositeComponentProvider;
import io.gravitee.gateway.core.component.CustomComponentProvider;
import io.gravitee.gateway.core.endpoint.GroupManager;
import io.gravitee.gateway.core.endpoint.factory.EndpointFactory;
import io.gravitee.gateway.core.endpoint.factory.impl.EndpointFactoryImpl;
import io.gravitee.gateway.core.endpoint.lifecycle.GroupLifecycleManager;
import io.gravitee.gateway.core.endpoint.lifecycle.impl.DefaultGroupLifecycleManager;
import io.gravitee.gateway.core.endpoint.ref.ReferenceRegister;
import io.gravitee.gateway.core.endpoint.ref.impl.DefaultReferenceRegister;
import io.gravitee.gateway.core.endpoint.resolver.ProxyEndpointResolver;
import io.gravitee.gateway.core.invoker.InvokerFactory;
import io.gravitee.gateway.env.GatewayConfiguration;
import io.gravitee.gateway.flow.policy.PolicyChainFactory;
import io.gravitee.gateway.handlers.api.context.ApiTemplateVariableProvider;
import io.gravitee.gateway.handlers.api.definition.Api;
import io.gravitee.gateway.handlers.api.policy.security.PlanBasedAuthenticationHandlerEnhancer;
import io.gravitee.gateway.handlers.api.processor.OnErrorProcessorChainFactory;
import io.gravitee.gateway.handlers.api.processor.RequestProcessorChainFactory;
import io.gravitee.gateway.handlers.api.processor.ResponseProcessorChainFactory;
import io.gravitee.gateway.policy.PolicyChainProviderLoader;
import io.gravitee.gateway.policy.PolicyConfigurationFactory;
import io.gravitee.gateway.policy.PolicyFactory;
import io.gravitee.gateway.policy.PolicyManager;
import io.gravitee.gateway.policy.impl.CachedPolicyConfigurationFactory;
import io.gravitee.gateway.policy.impl.PolicyFactoryCreator;
import io.gravitee.gateway.reactor.handler.ReactorHandler;
import io.gravitee.gateway.reactor.handler.ReactorHandlerFactory;
import io.gravitee.gateway.reactor.handler.context.ApiTemplateVariableProviderFactory;
import io.gravitee.gateway.reactor.handler.context.ExecutionContextFactory;
import io.gravitee.gateway.resource.ResourceConfigurationFactory;
import io.gravitee.gateway.resource.ResourceLifecycleManager;
import io.gravitee.gateway.resource.internal.ResourceConfigurationFactoryImpl;
import io.gravitee.gateway.resource.internal.ResourceManagerImpl;
import io.gravitee.gateway.security.core.*;
import io.gravitee.node.api.Node;
import io.gravitee.node.api.configuration.Configuration;
import io.gravitee.plugin.core.api.ConfigurablePluginManager;
import io.gravitee.plugin.policy.PolicyClassLoaderFactory;
import io.gravitee.plugin.policy.PolicyPlugin;
import io.gravitee.plugin.resource.ResourceClassLoaderFactory;
import io.gravitee.plugin.resource.ResourcePlugin;
import io.gravitee.resource.api.ResourceManager;
import io.vertx.core.Vertx;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.core.ResolvableType;
/**
* @author <NAME> (david.brassely at graviteesource.com)
* @author GraviteeSource Team
*/
public class ApiContextHandlerFactory implements ReactorHandlerFactory<Api> {
private final Logger logger = LoggerFactory.getLogger(ApiContextHandlerFactory.class);
@Autowired
private ApplicationContext applicationContext;
@Value("${reporters.logging.max_size:-1}")
private int maxSizeLogMessage;
@Value("${reporters.logging.excluded_response_types:#{null}}")
private String excludedResponseTypes;
@Value("${handlers.request.headers.x-forwarded-prefix:false}")
private boolean overrideXForwardedPrefix;
@Value("${classloader.legacy.enabled:true}")
private boolean classLoaderLegacyMode;
@Autowired
private Node node;
@Override
public ReactorHandler create(Api api) {
try {
if (api.isEnabled()) {
Class<?> handlerClass = this.getClass().getClassLoader().loadClass(ApiReactorHandler.class.getName());
final ApiReactorHandler handler = (ApiReactorHandler) handlerClass.getConstructor(Api.class).newInstance(api);
final ComponentProvider globalComponentProvider = applicationContext.getBean(ComponentProvider.class);
final CustomComponentProvider customComponentProvider = new CustomComponentProvider();
final ResourceLifecycleManager resourceLifecycleManager = resourceLifecycleManager(
api,
applicationContext.getBean(ResourceClassLoaderFactory.class),
resourceConfigurationFactory(),
applicationContext
);
customComponentProvider.add(ResourceManager.class, resourceLifecycleManager);
customComponentProvider.add(io.gravitee.definition.model.Api.class, api);
final CompositeComponentProvider apiComponentProvider = new CompositeComponentProvider(
customComponentProvider,
globalComponentProvider
);
// Force creation of a dedicated PolicyFactory for each api as it may involve cache we want to be released when api is undeployed.
final PolicyFactory policyFactory = applicationContext.getBean(PolicyFactoryCreator.class).getObject();
final PolicyManager policyManager = policyManager(
api,
policyFactory,
policyConfigurationFactory(),
applicationContext.getBean(PolicyClassLoaderFactory.class),
resourceLifecycleManager,
apiComponentProvider
);
final PolicyChainFactory policyChainFactory = policyChainFactory(policyManager);
final RequestProcessorChainFactory requestProcessorChainFactory = requestProcessorChainFactory(
api,
policyChainFactory,
policyManager,
applicationContext.getBean(PolicyChainProviderLoader.class),
authenticationHandlerSelector(
authenticationHandlerManager(securityProviderLoader(), authenticationHandlerEnhancer(api), apiComponentProvider)
)
);
final DefaultReferenceRegister referenceRegister = referenceRegister();
handler.setRequestProcessorChain(requestProcessorChainFactory);
handler.setResponseProcessorChain(
responseProcessorChainFactory(api, policyChainFactory, applicationContext.getBean(PolicyChainProviderLoader.class))
);
handler.setErrorProcessorChain(errorProcessorChainFactory(api, policyChainFactory));
final GroupLifecycleManager groupLifecycleManager = groupLifecyleManager(
api,
referenceRegister,
new EndpointFactoryImpl(),
applicationContext.getBean(GatewayConfiguration.class),
applicationContext.getBean(ConnectorRegistry.class),
applicationContext.getBean(Configuration.class),
applicationContext.getBean(ObjectMapper.class)
);
handler.setInvoker(
invokerFactory(api, applicationContext.getBean(Vertx.class), endpointResolver(referenceRegister, groupLifecycleManager))
.create()
);
handler.setPolicyManager(policyManager);
handler.setGroupLifecycleManager(groupLifecycleManager);
handler.setResourceLifecycleManager(resourceLifecycleManager);
ExecutionContextFactory executionContextFactory = executionContextFactory(apiComponentProvider);
executionContextFactory.addTemplateVariableProvider(new ApiTemplateVariableProvider(api));
executionContextFactory.addTemplateVariableProvider(referenceRegister);
applicationContext
.getBean(ApiTemplateVariableProviderFactory.class)
.getTemplateVariableProviders()
.forEach(executionContextFactory::addTemplateVariableProvider);
handler.setExecutionContextFactory(executionContextFactory);
return handler;
} else {
logger.warn("Api is disabled !");
}
} catch (Exception ex) {
logger.error("Unexpected error while creating API handler", ex);
}
return null;
}
public PolicyChainFactory policyChainFactory(PolicyManager policyManager) {
return new PolicyChainFactory(policyManager);
}
public PolicyManager policyManager(
Api api,
PolicyFactory factory,
PolicyConfigurationFactory policyConfigurationFactory,
PolicyClassLoaderFactory policyClassLoaderFactory,
ResourceLifecycleManager resourceLifecycleManager,
ComponentProvider componentProvider
) {
String[] beanNamesForType = applicationContext.getBeanNamesForType(
ResolvableType.forClassWithGenerics(ConfigurablePluginManager.class, PolicyPlugin.class)
);
ConfigurablePluginManager<PolicyPlugin<?>> ppm = (ConfigurablePluginManager<PolicyPlugin<?>>) applicationContext.getBean(
beanNamesForType[0]
);
return new ApiPolicyManager(
classLoaderLegacyMode,
applicationContext.getBean(DefaultClassLoader.class),
api,
factory,
policyConfigurationFactory,
ppm,
policyClassLoaderFactory,
resourceLifecycleManager,
componentProvider
);
}
public PolicyConfigurationFactory policyConfigurationFactory() {
return new CachedPolicyConfigurationFactory();
}
public ResourceLifecycleManager resourceLifecycleManager(
Api api,
ResourceClassLoaderFactory resourceClassLoaderFactory,
ResourceConfigurationFactory resourceConfigurationFactory,
ApplicationContext applicationContext
) {
String[] beanNamesForType = applicationContext.getBeanNamesForType(
ResolvableType.forClassWithGenerics(ConfigurablePluginManager.class, ResourcePlugin.class)
);
ConfigurablePluginManager<ResourcePlugin<?>> cpm = (ConfigurablePluginManager<ResourcePlugin<?>>) applicationContext.getBean(
beanNamesForType[0]
);
return new ResourceManagerImpl(
classLoaderLegacyMode,
applicationContext.getBean(DefaultClassLoader.class),
api,
cpm,
resourceClassLoaderFactory,
resourceConfigurationFactory,
applicationContext
);
}
public ResourceConfigurationFactory resourceConfigurationFactory() {
return new ResourceConfigurationFactoryImpl();
}
public SecurityProviderLoader securityProviderLoader() {
return new SecurityProviderLoader();
}
public AuthenticationHandlerManager authenticationHandlerManager(
SecurityProviderLoader securityProviderLoader,
AuthenticationHandlerEnhancer authenticationHandlerEnhancer,
ComponentProvider componentProvider
) {
AuthenticationHandlerManager authenticationHandlerManager = new AuthenticationHandlerManager(
securityProviderLoader,
componentProvider
);
authenticationHandlerManager.setAuthenticationHandlerEnhancer(authenticationHandlerEnhancer);
authenticationHandlerManager.afterPropertiesSet();
return authenticationHandlerManager;
}
public AuthenticationHandlerEnhancer authenticationHandlerEnhancer(Api api) {
return new PlanBasedAuthenticationHandlerEnhancer(api);
}
public AuthenticationHandlerSelector authenticationHandlerSelector(AuthenticationHandlerManager authenticationHandlerManager) {
return new DefaultAuthenticationHandlerSelector(authenticationHandlerManager);
}
public ExecutionContextFactory executionContextFactory(ComponentProvider componentProvider) {
return new ExecutionContextFactory(componentProvider);
}
public InvokerFactory invokerFactory(Api api, Vertx vertx, EndpointResolver endpointResolver) {
return new InvokerFactory(api, vertx, endpointResolver);
}
public DefaultReferenceRegister referenceRegister() {
return new DefaultReferenceRegister();
}
public GroupLifecycleManager groupLifecyleManager(
Api api,
ReferenceRegister referenceRegister,
EndpointFactory endpointFactory,
GatewayConfiguration gatewayConfiguration,
ConnectorRegistry connectorRegistry,
Configuration configuration,
ObjectMapper mapper
) {
return new DefaultGroupLifecycleManager(
api,
referenceRegister,
endpointFactory,
connectorRegistry,
configuration,
mapper,
gatewayConfiguration.tenant()
);
}
public EndpointResolver endpointResolver(ReferenceRegister referenceRegister, GroupManager groupManager) {
return new ProxyEndpointResolver(referenceRegister, groupManager);
}
public RequestProcessorChainFactory requestProcessorChainFactory(
Api api,
PolicyChainFactory policyChainFactory,
PolicyManager policyManager,
PolicyChainProviderLoader policyChainProviderLoader,
AuthenticationHandlerSelector authenticationHandlerSelector
) {
RequestProcessorChainFactory.RequestProcessorChainFactoryOptions options = new RequestProcessorChainFactory.RequestProcessorChainFactoryOptions();
options.setMaxSizeLogMessage(maxSizeLogMessage);
options.setOverrideXForwardedPrefix(overrideXForwardedPrefix);
options.setExcludedResponseTypes(excludedResponseTypes);
return new RequestProcessorChainFactory(
api,
policyChainFactory,
policyManager,
options,
policyChainProviderLoader,
authenticationHandlerSelector
);
}
public ResponseProcessorChainFactory responseProcessorChainFactory(
Api api,
PolicyChainFactory policyChainFactory,
PolicyChainProviderLoader policyChainProviderLoader
) {
return new ResponseProcessorChainFactory(api, policyChainFactory, policyChainProviderLoader, node);
}
public OnErrorProcessorChainFactory errorProcessorChainFactory(Api api, PolicyChainFactory policyChainFactory) {
return new OnErrorProcessorChainFactory(api, policyChainFactory);
}
public void setApplicationContext(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
}
| gravitee-io/gravitee-api-management |
<|start_filename|>src/events.js<|end_filename|>
/**
* This module provides functionality for constructing events similar to
* WebExtensions `events.Event` objects.
*
* @module events
*/
import * as debugging from "./debugging.js";
/**
* @constant {debugging.debuggingLogger}
* @private
*/
const debugLog = debugging.getDebuggingLog("events");
/**
* A callback function that is called immediately before a listener is added.
* @callback addListenerCallback
* @param {Function} listener - The listener that is being added.
* @param {Object} options - The options for the listener.
*/
/**
* A callback function that is called immediately after a listener is removed.
* @callback removeListenerCallback
* @param {Function} listener - The listener that was removed.
* @param {Object} options - The options that the listener was added with.
*/
/**
* A callback function that is called when a listener may be notified via
* `notifyListeners()`.
* @callback notifyListenersCallback
* @param {Function} listener - The listener that may be called.
* @param {Array} listenerArguments - The arguments that would be passed to the listener
* function.
* @param {Object} options - The options that the listener was added with.
* @returns {boolean} Whether to call the listener.
*/
/**
* A class that provides an event API similar to WebExtensions `events.Event` objects.
* Use the `createEvent` function to create an `Event` object.
* @hideconstructor
*/
class Event {
/**
* Creates an event instance similar to WebExtensions `events.Event` objects.
* @param {Object} [options] - A set of options for the event.
* @param {string} [options.name] - The name of the event.
* @param {addListenerCallback} [options.addListenerCallback] - A function that is
* called when a listener is added.
* @param {removeListenerCallback} [options.removeListenerCallback] - A function
* that is called when a listener is removed.
* @param {notifyListenersCallback} [options.notifyListenersCallback] - A function
* that is called before a listener is notified and can filter the notification.
*/
constructor({
name = null,
addListenerCallback = null,
removeListenerCallback = null,
notifyListenersCallback = null
} = {
name: null,
addListenerCallback: null,
removeListenerCallback: null,
notifyListenersCallback: null
}) {
this.name = name;
this.addListenerCallback = addListenerCallback;
this.removeListenerCallback = removeListenerCallback;
this.notifyListenersCallback = notifyListenersCallback;
this.listeners = new Map();
}
/**
* Add an event listener with the specified options. If the listener has
* previously been added for the event, the listener's options will be
* updated.
* @param {Function} listener - The listener to call when the event fires.
* @param {Object} options - Options for when the listener should be called.
*/
addListener(listener, options) {
if(this.addListenerCallback !== null) {
this.addListenerCallback(listener, options);
}
this.listeners.set(listener, options);
// If the event has a name, annotate the listener with the name
if(typeof this.name === "string") {
listener.webScienceEventName = this.name;
}
}
/**
* Remove an event listener.
* @param {Function} listener - The listener to remove.
*/
removeListener(listener) {
if(this.removeListenerCallback !== null) {
this.removeListenerCallback(listener, this.listeners.get(listener));
}
this.listeners.delete(listener);
}
/**
* Check whether a particular event listener has been added.
* @param {EventCallbackFunction} listener - The listener to check.
* @returns {boolean} Whether the listener has been added.
*/
hasListener(listener) {
return this.listeners.has(listener);
}
/**
* Check whether there are any listeners for the event.
* @returns {boolean} Whether there are any listeners for the event.
*/
hasAnyListeners() {
return this.listeners.size > 0;
}
/**
* Notify the listeners for the event.
* @param {Array} [listenerArguments=[]] - The arguments that will be passed to the
* listeners.
*/
notifyListeners(listenerArguments = []) {
this.listeners.forEach((options, listener) => {
try {
if((this.notifyListenersCallback === null) || this.notifyListenersCallback(listener, listenerArguments, options)) {
listener.apply(null, listenerArguments);
}
}
catch(error) {
debugLog(`Error in listener notification: ${error}`);
}
});
}
}
/**
* An extension of the Event class that permits only one listener at a time.
* @template EventCallbackFunction
* @template EventOptions
* @extends {Event<EventCallbackFunction, EventOptions>}
* @private
*/
class EventSingleton extends Event {
/**
* A function that adds an event listener, with optional parameters. If the
* listener has previously been added for the event, the listener's options
* (if any) will be updated.
* @param {EventCallbackFunction} listener - The function to call when the event fires.
* @param {EventOptions} options - Options for when the listener should be called.
* The supported option(s) depend on the event type.
* @throws {Error} This function throws an Error if there is already a listener for
* the event.
*/
addListener(listener, options) {
if(this.listeners.size > 0) {
throw new Error("Error: cannot add more than one listener to EventSingleton event.");
}
super.addListener(listener, options);
}
}
/**
* Create a new Event object that implements WebExtensions event syntax, with the
* provided options.
* @param {Object} [options] - The options for the event.
* @param {string} options.name - The name of the event.
* @param {addListenerCallback} [options.addListenerCallback] - A function that is
* called when a listener is added.
* @param {removeListenerCallback} [options.removeListenerCallback] - A function
* that is called when a listener is removed.
* @param {notifyListenersCallback} [options.notifyListenersCallback] - A function
* that is called before a listener is notified and can filter the notification.
* @param {boolean} [options.singleton = false] - Whether to allow only one listener
* for the event.
* @returns {Event} - The created `Event` object.
*/
export function createEvent({
name = null,
addListenerCallback = null,
removeListenerCallback = null,
notifyListenersCallback = null,
singleton = false
} = {
name: null,
addListenerCallback: null,
removeListenerCallback: null,
notifyListenersCallback: null,
singleton: false
}) {
if(singleton) {
return /*@__PURE__*/new EventSingleton({
name,
addListenerCallback,
removeListenerCallback,
notifyListenersCallback
});
}
return /*@__PURE__*/new Event({
name,
addListenerCallback,
removeListenerCallback,
notifyListenersCallback
});
}
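// A minimal usage sketch for createEvent (not part of the original module;
// the event name and listener body are illustrative assumptions):
//
//   import { createEvent } from "./events.js";
//
//   const onThingHappened = createEvent({ name: "example.onThingHappened" });
//   onThingHappened.addListener(details => {
//       console.log(`thing happened: ${details.value}`);
//   }, { });
//   // Somewhere else in the module, fire the event with listener arguments:
//   onThingHappened.notifyListeners([ { value: 42 } ]);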
<|start_filename|>src/content-scripts/pageTransition.click.content.js<|end_filename|>
/**
* Content script for the pageTransition module that observes clicks on pages and notifies the
* module's background script. We use a separate pageTransition content script for generating
* `pageTransition.onPageTransitionData` event data, because the content scripts should run on
* different sets of pages. We consider document `mouseup`, `contextmenu`, and `keyup`
* enter/return key events to be identical, since the user could open a link with a left click,
* a right click (using the context menu), or pressing enter/return on a selected link. We also
* require that a page have the user's attention to consider a click, since otherwise the click
* was likely initiated by a script.
*
* @module pageTransition.click.content
*/
import { fromMonotonicClock } from "../timing.js";
// IIFE encapsulation to allow early return
(function () {
// Function encapsulation to wait for pageManager load
const pageManagerLoaded = function() {
const pageManager = window.webScience.pageManager;
/**
* The time, in milliseconds, to wait between sending click events to the background script.
* We also send clicks on page visit stop.
* @constant {number}
*/
const clickDebounceDelay = 200;
/**
* The timeout ID for debouncing click events. Set to 0 when there is no pending timeout.
* @type {number}
* @default
*/
let clickDebounceTimeoutID = 0;
/**
* Times when the user clicked on the page. Initialized to [ ] when `onPageVisitStart` fires and
* set to [ ] when there is no pending click to notify the background script about.
* @type {number[]}
* @default
*/
let clickTimeStamps = [ ];
/**
* The timestamp for the last click that the content script sent to the background script.
* Initialized to 0 when `onPageVisitStart` fires.
* @type {number}
* @default
*/
let lastSentClickTimeStamp = 0;
/**
* Handle document `mouseup`, `contextmenu`, and `keyup` (with the enter key) events.
* @param {Event} event - The DOM event.
*/
const handleClickEvent = function(event) {
// If the page doesn't have the user's attention, ignore the click
if(!pageManager.pageHasAttention) {
return;
}
// Compute the event timestamp on the shared monotonic clock
const timeStamp = fromMonotonicClock(event.timeStamp, true);
// Queue the click for reporting to the background script
clickTimeStamps.push(timeStamp);
// If there's already a pending debounce timer, let it handle the click
if(clickDebounceTimeoutID > 0) {
return;
}
// If it's been longer than the debounce delay since the last click we sent,
// send the click immediately. This is a bit different from typical debounce
// logic (sending the initial event immediately, then waiting to debounce
// subsequent events), but we need to send clicks immediately in case
// the user has opened a link in a new tab.
if((clickTimeStamps[clickTimeStamps.length - 1] - lastSentClickTimeStamp) > clickDebounceDelay) {
notifyBackgroundScript();
}
// Otherwise, set a debounce timer to notify the background script
else {
clickDebounceTimeoutID = setTimeout(notifyBackgroundScript, clickDebounceDelay);
}
};
/**
* Notify the background script about the most recent click on the page.
*/
const notifyBackgroundScript = function() {
// If there is no pending click for notification, there's nothing to do
if(clickTimeStamps.length === 0) {
return;
}
// Clear the debounce timeout
clearTimeout(clickDebounceTimeoutID);
clickDebounceTimeoutID = 0;
// Send a message to the background script
browser.runtime.sendMessage({
type: "webScience.pageTransition.contentScriptClickUpdate",
pageId: pageManager.pageId,
clickTimeStamps
});
// Store the timestamp for the last click we've sent
lastSentClickTimeStamp = clickTimeStamps[clickTimeStamps.length - 1];
// Reset the clicks to send
clickTimeStamps = [ ];
}
// When the page visit start event fires, reset click tracking values
pageManager.onPageVisitStart.addListener(() => {
clearTimeout(clickDebounceTimeoutID);
clickDebounceTimeoutID = 0;
clickTimeStamps = [ ];
lastSentClickTimeStamp = 0;
});
// When the page visit stop event fires, send the most recent click
// even if we haven't waited the debounce time. This is important
// for handling a race condition in the interaction between the
// debounce delay and how recently the user must have clicked on a
// page to treat the click as a click transition for another page.
// Also store page click data in the window global for the event
// content script. We use this stored data if there's a History API
// load.
pageManager.onPageVisitStop.addListener(() => {
// We have to call notifyBackgroundScript before storing the most
// recent click in window.webScience.pageTransition, because
// notifyBackgroundScript could update the lastSentClickTimeStamp.
notifyBackgroundScript();
if(!("webScience" in window)) {
window.webScience = { };
}
if(!("pageTransition" in window.webScience)) {
window.webScience.pageTransition = { };
}
window.webScience.pageTransition.lastClickPageId = pageManager.pageId;
window.webScience.pageTransition.lastClickTimeStamp = lastSentClickTimeStamp;
});
// Handle mouse click events. We listen for the mouseup event rather than the
// click event because certain websites (e.g., YouTube) cancel the click event
// for a link as it bubbles through the DOM. We also listen for the contextmenu
// event because mouseup does not consistently fire for right clicks in Firefox
// (it does not fire if the user chooses to open a link in a new tab or window
// before releasing the right mouse button) and because mouseup does not fire
// at all for right clicks in Chrome. It's OK if handleClickEvent is called
// for more than one event caused by the same click.
document.addEventListener("mouseup", handleClickEvent);
document.addEventListener("contextmenu", handleClickEvent);
// Handle keyboard events.
document.addEventListener("keyup", event => {
if(event.code === "Enter") {
handleClickEvent(event);
}
});
};
// Wait for pageManager load
if (("webScience" in window) && ("pageManager" in window.webScience)) {
pageManagerLoaded();
}
else {
if(!("pageManagerHasLoaded" in window)) {
window.pageManagerHasLoaded = [];
}
window.pageManagerHasLoaded.push(pageManagerLoaded);
}
})();
<|start_filename|>tests/build/rollup.config.js<|end_filename|>
import commonjs from "@rollup/plugin-commonjs";
import resolve from "@rollup/plugin-node-resolve";
import webScienceRollupPlugin from "@mozilla/web-science/rollup-plugin";
export default (cliArgs) => {
const rollupConfig = [
{
input: "src/background.js",
output: {
file: "dist/background.js",
},
plugins: [
webScienceRollupPlugin(),
resolve({
browser: true,
}),
commonjs(),
],
}
];
return rollupConfig;
}
<|start_filename|>src/pageTransition.js<|end_filename|>
/**
* This module enables observing webpage transitions, synthesizing a range of
* transition data that may be valuable for browser-based studies. See the
* `onPageTransitionData` event for details.
*
* ## Types of Page Transition Data
* This module supports several types of page transition data. Some types are
* supported and recommended, because the data is consistently available, has
* consistent meaning, and reflects discrete categories of user interactions.
* Other types of transition data are supported because they appear in prior
* academic literature, but we do not recommend them because of significant
* limitations.
* * Supported and Recommended Types of Page Transition Data
* * WebExtensions Transitions - This module reports the same webpage
* transition data provided by the WebExtensions `webNavigation` API. There
* are two types of transition data: `TransitionType` (e.g., "link" or
* "typed") and `TransitionQualifier` (e.g., "from_address_bar" or
* "forward_back"). Note that Firefox's support for these values is mostly
* but not entirely complete and defaults to a "link" transition type. The
* MDN documentation for Firefox's implementation is also currently out of
* date, see: https://github.com/mdn/browser-compat-data/issues/9019. We
* recommend checking click transition data to confirm whether the user
* clicked on a link.
* * Tab-based Transitions - This module reports the webpage that was
* previously loaded in a new webpage's tab. If the webpage is loading in a
* newly created tab, this module reports the webpage that was open in
* the opener tab. We recommend using tab-based transition data when the user
* has clicked a link (according to both WebExtensions and click data), when
* the user has navigated with forward and back buttons, and when the page
* has refreshed (due to user action or automatically). In these situations,
* there is a clear causal relationship between the previous and current
* pages. We do not otherwise recommend using tab-based transition data,
* because the user might be reusing a tab for reasons unrelated to the page
* loaded in the tab.
* * Click Transitions - This module reports when a click on a webpage is
* immediately followed by a new webpage loading in the same tab (or a
* newly opened tab whose opener is that tab). This activity indicates
* the user likely clicked a link, and it compensates for limitations in
* how browsers detect link clicks for the `webNavigation` API.
* * Supported But Not Recommended Types of Page Transition Data
* * Referrers - This module reports the HTTP referrer for each new page. While
* referrers have long been a method for associating webpage loads with
* prior pages, they are not consistently available (webpages and browsers
* are increasingly limiting when referrers are sent), do not have consistent
* content (similarly, webpages and browsers are increasingly limiting
* referrers to just origins), and do not have consistent meaning (the rules
* for setting referrers are notoriously complex and can have nonintuitive
* semantics). Be especially careful with referrers for webpage loads via
* the History API---because there is no new document-level HTTP request, the
* referrer will not change when the URL changes.
* * Time-based Transitions - This module reports the most recent webpage that
* loaded in any tab. We do not recommend relying on this data, because a
* chronological ordering of webpage loads may have no relation to user
* activity or perception (e.g., a webpage might automatically reload in the
* background before a user navigates to a new page).
*
* ## Page Transition Data Sources
* This module builds on the page tracking provided by the `pageManager`
* module and uses browser events, DOM events, and a set of heuristics to
* associate transition information with each page visit. The module relies on
* the following sources of data about page transitions, in addition to the
* page visit tracking, attention tracking, and URL normalization provided by
* `pageManager`:
* * Background Script Data Sources
* * `webNavigation.onCommitted` - provides tab ID, url,
* `webNavigation.TransitionType`, and `webNavigation.TransitionQualifier`
* values when a new page is loading in a tab.
* * `webNavigation.onDOMContentLoaded` - provides tab ID, url, and a
* timestamp approximating when the `DOMContentLoaded` event fired on a
* page.
* * `webNavigation.onHistoryStateUpdated` - provides tab ID, url,
* `webNavigation.TransitionType`, and `webNavigation.TransitionQualifier`
* values when a new page loads in a tab via the History API.
* * `webNavigation.onCreatedNavigationTarget` - provides tab ID, source
* tab ID, and url when a page loads in a tab newly created by another
* tab. Because of a regression, this event does not currently fire
* in Firefox for a click on a link with the target="_blank" attribute.
* * `tabs.onCreated` - provides tab ID and source tab ID when a page
* loads in a tab newly created by another tab, except if the new
* tab is in a different window.
* * Content Script Data Sources
* * The `mouseup` event on the `document` element - detects possible link
* clicks via the mouse (e.g., left click); `mouseup` is used rather than
* `click` because some sites cancel the `click` event as it bubbles.
* * The `contextmenu` event on the `document` element - detects possible
* link clicks via the mouse (e.g., right click or control + click).
* * The `keyup` event on the document element - detects possible link
* clicks via the keyboard.
*
* ## Combining Data Sources into a Page Transition
* Merging these data sources into a page transition event poses several
* challenges.
* * We have to sync background script `webNavigation` events with content
* scripts. As with `pageManager`, we have to account for the possibility
* of race conditions between the background script and content script
* environments. We use the same general approach in this module as in
* `pageManager`, converting background script events into messages posted
* to content scripts. We have to be a bit more careful about race
* conditions than in `pageManager`, though, because if a tab property
* event handled in that module goes to the wrong content script the
* consequences are minimal (because correct event data will quickly
* arrive afterward). In this module, by contrast, an error could mean
* incorrectly associating a pair of pages. We further account for the
* possibility of race conditions by matching the `webNavigation` URL and
* DOMContentLoaded timestamp with the content script's URL and
* DOMContentLoaded timestamp.
* * We have to sync background script `webNavigation` events for different
* stages in the webpage loading lifecycle, because we want properties of
* both `webNavigation.onCommitted` and `webNavigation.onDOMContentLoaded`:
* the former has transition types and qualifiers, while the latter has a
* timestamp that is comparable to an event in the content script and does
* not have the risk of firing before the content script is ready to
* receive messages. Unlike `webRequest` events, `webNavigation` events are
* not associated with unique identifiers. We accomplish syncing across
* events by assuming that when the `webNavigation.onDOMContentLoaded` event
* fires for a tab, it is part of the same navigation lifecycle as the most
* recent `webNavigation.onCommitted` event in the tab.
* * We have to sync content script data for a page with content script
* data for a prior page (either loaded in the same tab, loaded in an
* opener tab, or loaded immediately before in time). We accomplish this for
* ordinary page loads by maintaining a cache of page visit data in the
* background script. We accomplish this for History API page loads
* by passing information in the content script environment.
* * We have to account for a regression in Firefox where
* `webNavigation.onCreatedNavigationTarget` does not currently fire for
* a click on a link with the target="_blank" attribute. We accomplish this
* by using `tabs.onCreated` event data when
* `webNavigation.onCreatedNavigationTarget` event data is not available.
*
* @see {@link https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/API/webNavigation/onCommitted}
* @see {@link https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/API/webNavigation/TransitionType}
* @see {@link https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/API/webNavigation/TransitionQualifier}
* @see {@link https://github.com/mdn/browser-compat-data/issues/9019}
* @see {@link https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/API/tabs/onCreated}
* @module pageTransition
*/
import * as events from "./events.js";
import * as permissions from "./permissions.js";
import * as messaging from "./messaging.js";
import * as matching from "./matching.js";
import * as timing from "./timing.js";
import * as pageManager from "./pageManager.js";
import pageTransitionEventContentScript from "include:./content-scripts/pageTransition.event.content.js";
import pageTransitionClickContentScript from "include:./content-scripts/pageTransition.click.content.js";
/**
* A listener for the `onPageTransitionData` event.
* @callback pageTransitionDataListener
* @memberof module:pageTransition.onPageTransitionData
* @param {Object} details - Additional information about the page transition data event.
* @param {string} details.pageId - The ID for the page, unique across browsing sessions.
* @param {string} details.url - The URL of the page, without any hash.
* @param {string} details.referrer - The referrer URL for the page, or `""` if there is no referrer. Note that we
* recommend against using referrers for analyzing page transitions.
* @param {number} details.tabId - The ID for the tab containing the page, unique to the browsing session. Note that if
* you send a message to the content script in the tab, there is a possible race condition where the page in
* the tab changes before your message arrives. You should specify a page ID (e.g., `pageId`) in your message to the
* content script, and the content script should check that page ID against its current page ID to ensure that the
* message was received by the intended page.
* @param {boolean} details.isHistoryChange - Whether the page transition was caused by a URL change via the History API.
* @param {boolean} details.isOpenedTab - Whether the page is loading in a tab that was newly opened from another tab.
* @param {number} details.openerTabId - If the page is loading in a tab that was newly opened from another tab
* (i.e., `isOpenedTab` is `true`), the tab ID of the opener tab. Otherwise, `tabs.TAB_ID_NONE`. Note that if
* you send a message to the content script in the tab, there is a possible race condition where the page in
* the tab changes before your message arrives. You should specify a page ID (e.g., `tabSourcePageId`) in your
* message to the content script, and the content script should check that page ID against its current page ID to
* ensure that the message was received by the intended page.
* @param {string} details.transitionType - The transition type, from `webNavigation.onCommitted` or
* `webNavigation.onHistoryStateUpdated`.
* @param {string[]} details.transitionQualifiers - The transition qualifiers, from `webNavigation.onCommitted` or
* `webNavigation.onHistoryStateUpdated`.
* @param {string} details.tabSourcePageId - The ID for the most recent page in the same tab. If the page is opening
* in a new tab, then the ID of the most recent page in the opener tab. The value is `""` if there is no such page.
* @param {string} details.tabSourceUrl - The URL, without any hash, for the most recent page in the same tab. If the page
* is opening in a new tab, then the URL of the most recent page in the opener tab. The value is `""` if there is no
* such page.
* @param {boolean} details.tabSourceClick - Whether the user recently clicked or pressed enter/return on the most recent
* page in the same tab. If the page is loading in a tab that was newly opened by another tab, then whether the user
* recently clicked or pressed enter/return on the most recent page in the opener tab. The value is `false` if there
* is no such page.
* @param {string} details.timeSourcePageId - The ID for the most recent page that loaded into any tab. If this is the
* first page visit after the extension starts, the value is "". Note that we recommend against using time-based
* page transition data.
* @param {string} details.timeSourceUrl - The URL for the most recent page that loaded into any tab. If this is the
* first page visit after the extension starts, the value is "". Note that we recommend against using time-based
* page transition data.
*/
/**
* @typedef {Object} PageTransitionDataListenerRecord
* @property {matching.MatchPatternSet} matchPatternSet - Match patterns for pages where the listener should be
* notified about transition data.
* @property {boolean} privateWindows - Whether to notify the listener about page transitions in
* private windows and whether to consider pages loaded in private windows when generating
* time-based transition information.
* @property {browser.contentScripts.RegisteredContentScript} contentScript - The content
* script associated with the listener.
* @private
*/
/**
* A map where each key is a listener and each value is a record for that listener.
* @constant {Map<pageTransitionDataListener, PageTransitionDataListenerRecord>}
* @private
*/
const pageTransitionDataListeners = new Map();
/**
* Add a listener for the `onPageTransitionData` event.
* @function addListener
* @memberof module:pageTransition.onPageTransitionData
* @param {pageTransitionDataListener} listener - The listener to add.
* @param {Object} options - Options for the listener.
* @param {string[]} options.matchPatterns - Match patterns for pages where the listener should be notified about
* transition data.
* @param {boolean} [options.privateWindows=false] - Whether to notify the listener about page transitions in
* private windows and whether to consider pages loaded in private windows when generating time-based
* transition information.
*/
/**
* Remove a listener for the `onPageTransitionData` event.
* @function removeListener
* @memberof module:pageTransition.onPageTransitionData
* @param {pageTransitionDataListener} listener - The listener to remove.
*/
/**
* Whether a specified listener has been added for the `onPageTransitionData` event.
* @function hasListener
* @memberof module:pageTransition.onPageTransitionData
* @param {pageTransitionDataListener} listener - The listener to check.
* @returns {boolean} Whether the listener has been added for the event.
*/
/**
* Whether the `onPageTransitionData` event has any listeners.
* @function hasAnyListeners
* @memberof module:pageTransition.onPageTransitionData
* @returns {boolean} Whether the event has any listeners.
*/
/**
* An event that fires when data about a page transition is available. The event will fire after
* the `pageManager.onPageVisitStart` event, when DOM content has loaded (for ordinary page loads)
* or just after the URL changes (for History API page loads).
* @namespace
*/
export const onPageTransitionData = events.createEvent({
name: "webScience.pageTransition.onPageTransitionData",
addListenerCallback: addListener,
removeListenerCallback: removeListener,
notifyListenersCallback: () => { return false; }
});
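// Illustrative usage sketch (not part of the module): registering a listener
// for onPageTransitionData. The match pattern and handler body below are
// assumptions for demonstration only.
//
// onPageTransitionData.addListener(details => {
//     console.log(`Transition to ${details.url} in tab ${details.tabId}: `
//         + `${details.transitionType} [${details.transitionQualifiers.join(", ")}]`);
// }, {
//     matchPatterns: [ "*://*.example.com/*" ],
//     privateWindows: false
// });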
/**
* A callback function for adding a page transition data listener. The options for the listener must be kept in
* sync with the public `onPageTransitionData.addListener` type.
* @param {pageTransitionDataListener} listener - The listener being added.
* @param {Object} options - Options for the listener.
* @param {string[]} options.matchPatterns - Match patterns for pages where the listener should be notified about
* transition data.
* @param {boolean} [options.privateWindows=false] - Whether to notify the listener about page transitions in
* private windows and whether to consider pages loaded in private windows when generating time-based
* transition information.
* @private
*/
async function addListener(listener, {
matchPatterns,
privateWindows = false
}) {
await initialize();
// Store a record for the listener
pageTransitionDataListeners.set(listener, {
// Compile the listener's match pattern set
matchPatternSet: matching.createMatchPatternSet(matchPatterns),
privateWindows,
// Register the event content script with the listener's match patterns
contentScript: await browser.contentScripts.register({
matches: matchPatterns,
js: [{
file: pageTransitionEventContentScript
}],
runAt: "document_start"
})
});
}
/**
* A callback function for removing a page transition data listener.
* @param {pageTransitionDataListener} listener - The listener that is being removed.
* @private
*/
function removeListener(listener) {
const listenerRecord = pageTransitionDataListeners.get(listener);
if(listenerRecord === undefined) {
return;
}
listenerRecord.contentScript.unregister();
    pageTransitionDataListeners.delete(listener);
}
/**
* Whether the module has been initialized.
* @type {boolean}
* @private
*/
let initialized = false;
/**
* Initialize the module, registering event handlers and message schemas.
* @private
*/
async function initialize() {
if(initialized) {
return;
}
initialized = true;
permissions.check({
module: "webScience.pageTransition",
requiredPermissions: [ "webNavigation" ],
suggestedOrigins: [ "<all_urls>" ]
});
await pageManager.initialize();
// Register the click content script for all URLs permitted by the extension manifest
await browser.contentScripts.register({
matches: permissions.getManifestOriginMatchPatterns(),
js: [{
file: pageTransitionClickContentScript
}],
runAt: "document_start"
});
// When pageManager.onPageVisitStart fires...
pageManager.onPageVisitStart.addListener(({ pageId, url, pageVisitStartTime, privateWindow, tabId }) => {
// Add the page visit's page ID, URL, start time, and private window status to the time-based transition cache
pageVisitTimeCache[pageId] = { url, pageVisitStartTime, privateWindow };
// Add the page visit's tab ID, page ID, URL, and start time to the tab-based transition cache
let cachedPageVisitsForTab = pageVisitTabCache.get(tabId);
if(cachedPageVisitsForTab === undefined) {
cachedPageVisitsForTab = { };
pageVisitTabCache.set(tabId, cachedPageVisitsForTab);
}
cachedPageVisitsForTab[pageId] = { url, pageVisitStartTime, clickTimeStamps: [ ] };
// We can't remove stale pages from the time-based and tab-based caches here, because otherwise we can
// have a race condition where the most recent page in a cache (from pageManager.onPageVisitStart)
// is the same page that's about to receive a message from the background script (because of
// webNavigation.onDOMContentLoaded). In that situation, we might evict an older page from the cache
// that was the correct page for time-based or tab-based transition information.
});
// When webNavigation.onCommitted fires, store the details in the per-tab onCommitted details cache
browser.webNavigation.onCommitted.addListener(details => {
// Ignore subframe navigation
if(details.frameId !== 0) {
return;
}
webNavigationOnCommittedCache.set(details.tabId, details);
}, {
url: [ { schemes: [ "http", "https" ] } ]
});
// When webNavigation.onDOMContentLoaded fires, pull the webNavigation.onCommitted
// details from the per-tab cache and notify the content script
browser.webNavigation.onDOMContentLoaded.addListener(details => {
// Ignore subframe navigation
if(details.frameId !== 0) {
return;
}
// Get the cached webNavigation.onCommitted details and expire the cache
const webNavigationOnCommittedDetails = webNavigationOnCommittedCache.get(details.tabId);
if(webNavigationOnCommittedDetails === undefined) {
return;
}
webNavigationOnCommittedCache.delete(details.tabId);
// Confirm that the webNavigation.onCommitted URL matches the webNavigation.onDOMContentLoaded URL
if(details.url !== webNavigationOnCommittedDetails.url) {
return;
}
// Notify the content script
sendUpdateToContentScript({
tabId: details.tabId,
url: details.url,
timeStamp: timing.fromSystemClock(details.timeStamp),
webNavigationTimeStamp: details.timeStamp,
transitionType: webNavigationOnCommittedDetails.transitionType,
transitionQualifiers: webNavigationOnCommittedDetails.transitionQualifiers,
isHistoryChange: false
});
}, {
url: [ { schemes: [ "http", "https" ] } ]
});
// When webNavigation.onHistoryStateUpdated fires, notify the content script
browser.webNavigation.onHistoryStateUpdated.addListener(details => {
// Ignore subframe navigation
if(details.frameId !== 0) {
return;
}
// Notify the content script
sendUpdateToContentScript({
tabId: details.tabId,
url: details.url,
timeStamp: timing.fromSystemClock(details.timeStamp),
webNavigationTimeStamp: details.timeStamp,
transitionType: details.transitionType,
transitionQualifiers: details.transitionQualifiers,
isHistoryChange: true
});
}, {
url: [ { schemes: [ "http", "https" ] } ]
});
// Register the message schemas for background script updates
messaging.registerSchema("webScience.pageTransition.backgroundScriptEventUpdate", {
url: "string",
timeStamp: "number",
webNavigationTimeStamp: "number",
transitionType: "string",
transitionQualifiers: "object",
pageVisitTimeCache: "object",
cachedPageVisitsForTab: "object",
isHistoryChange: "boolean",
isOpenedTab: "boolean",
openerTabId: "number",
tabOpeningTimeStamp: "number"
});
    // When webNavigation.onCreatedNavigationTarget fires, update the opener tab cache.
// This event fires for all opened tabs regardless of window, except for a regression
// since Firefox 65 where the event does not fire for tabs opened by clicking a link
// with a target="_blank" attribute. See https://github.com/mdn/content/issues/4507.
    // We observe those tab openings with tabs.onCreated, since the tabs are always in the same
// window. We do not use the URL from webNavigation.onCreatedNavigationTarget, because
// an HTTP redirect might change the URL before webNavigation.onCommitted and
// webNavigation.onDOMContentLoaded fire.
browser.webNavigation.onCreatedNavigationTarget.addListener(details => {
openerTabCache.set(details.tabId, {
openerTabId: details.sourceTabId,
timeStamp: details.timeStamp
});
}, {
url: [ { schemes: [ "http", "https" ] } ]
});
// When tabs.onCreated fires, update the opener tab cache. This event fires for all opened
// tabs in the same window, but not opened tabs in a new window. We observe tabs that open
// in new windows with webNavigation.onCreatedNavigationTarget.
browser.tabs.onCreated.addListener(tab => {
// Ignore non-content tabs
if(!("id" in tab) || (tab.id === browser.tabs.TAB_ID_NONE)) {
return;
}
// Ignore tabs without content opener tabs
if(!("openerTabId" in tab) || (tab.openerTabId === browser.tabs.TAB_ID_NONE)) {
return;
}
// If we've already populated the opener tab cache for this tab with data from a more
// detailed webNavigation.onCreatedNavigationTarget event, ignore this event
if(openerTabCache.get(tab.id) !== undefined) {
return;
}
openerTabCache.set(tab.id, {
openerTabId: tab.openerTabId,
timeStamp: timing.now()
});
});
// When tabs.onRemoved fires, set a timeout to expire the tab-based transition information
// and opener information for that tab
browser.tabs.onRemoved.addListener(tabId => {
setTimeout(() => {
pageVisitTabCache.delete(tabId);
openerTabCache.delete(tabId);
}, tabRemovedExpiry);
});
// When the event content script sends an update message, notify the relevant listeners
messaging.onMessage.addListener((eventUpdateMessage, sender) => {
for(const [listener, listenerRecord] of pageTransitionDataListeners) {
if(eventUpdateMessage.privateWindow && !listenerRecord.privateWindows) {
continue;
}
if(listenerRecord.matchPatternSet.matches(eventUpdateMessage.url)) {
listener({
pageId: eventUpdateMessage.pageId,
url: eventUpdateMessage.url,
referrer: eventUpdateMessage.referrer,
tabId: sender.tab.id,
isHistoryChange: eventUpdateMessage.isHistoryChange,
isOpenedTab: eventUpdateMessage.isOpenedTab,
openerTabId: eventUpdateMessage.openerTabId,
transitionType: eventUpdateMessage.transitionType,
transitionQualifiers: eventUpdateMessage.transitionQualifiers.slice(),
tabSourcePageId: eventUpdateMessage.tabSourcePageId,
tabSourceUrl: eventUpdateMessage.tabSourceUrl,
tabSourceClick: eventUpdateMessage.tabSourceClick,
timeSourcePageId: listenerRecord.privateWindows ? eventUpdateMessage.timeSourcePageId : eventUpdateMessage.timeSourceNonPrivatePageId,
timeSourceUrl: listenerRecord.privateWindows ? eventUpdateMessage.timeSourceUrl : eventUpdateMessage.timeSourceNonPrivateUrl
});
}
}
},
{
type: "webScience.pageTransition.contentScriptEventUpdate",
schema: {
pageId: "string",
url: "string",
isHistoryChange: "boolean",
isOpenedTab: "boolean",
openerTabId: "number",
transitionType: "string",
transitionQualifiers: "object",
tabSourcePageId: "string",
tabSourceUrl: "string",
tabSourceClick: "boolean",
timeSourcePageId: "string",
timeSourceUrl: "string",
timeSourceNonPrivatePageId: "string",
timeSourceNonPrivateUrl: "string",
privateWindow: "boolean"
}
});
// When the click content script sends an update message, update the tab-based transition cache
messaging.onMessage.addListener((clickUpdateMessage, sender) => {
// There should be a tab ID associated with the message, but might as well make certain
if(!("tab" in sender) || !("id" in sender.tab)) {
return;
}
// Update the cached link clicks for the page
const cachedPageVisitsForTab = pageVisitTabCache.get(sender.tab.id);
if((cachedPageVisitsForTab === undefined) || !(clickUpdateMessage.pageId in cachedPageVisitsForTab)) {
return;
}
cachedPageVisitsForTab[clickUpdateMessage.pageId].clickTimeStamps = cachedPageVisitsForTab[clickUpdateMessage.pageId].clickTimeStamps.concat(clickUpdateMessage.clickTimeStamps);
},
{
type: "webScience.pageTransition.contentScriptClickUpdate",
schema: {
pageId: "string",
clickTimeStamps: "object"
}
});
}
/**
* A map where keys are tab IDs and values are the most recent `webNavigation.onCommitted`
* details, removed from the map when a subsequent `webNavigation.onDOMContentLoaded` fires
* for the tab.
* @constant {Map<number, Object>}
* @private
*/
const webNavigationOnCommittedCache = new Map();
/**
* A map, represented as an object, where keys are page IDs and values are objects with
* `pageVisitStartTime`, `url`, and `privateWindow` properties from `pageManager.onPageVisitStart`.
* We use an object so that it can be easily serialized. The reason we maintain this cache
* is to account for possible race conditions between when pages load in the content script
* environment and when the background script environment learns about page loads.
* @constant {Object}
* @private
*/
const pageVisitTimeCache = { };
/**
* The maximum time, in milliseconds, to consider a page visit in any tab as a possible most
* recent page visit in the content script environment, even though it's not the most recent
* page visit in the background script environment.
* @constant {number}
* @private
*/
const pageVisitTimeCacheExpiry = 1000;
/**
* @typedef {Object} PageVisitCachedDetails
* @property {number} pageVisitStartTime - The page visit start time from `pageManager`.
* @property {string} url - The URL from `pageManager`.
* @property {number[]} clickTimeStamps - Timestamps for recent clicks on the page, from
* the module's click content script.
* @private
*/
/**
* A map where keys are tab IDs and values are maps, represented as objects, where keys
* are page IDs and values are PageVisitCachedDetails objects.
* @constant {Map<number, Object>}
* @private
*/
const pageVisitTabCache = new Map();
/**
* The maximum time, in milliseconds, to consider a page visit in a specific tab as a possible
* most recent page visit for that tab in the content script environment, even though it's not
* the most recent page visit for that tab in the background script environment.
* @constant {number}
* @private
*/
const pageVisitTabCacheExpiry = 5000;
/**
* The maximum time, in milliseconds, to consider a click on a page as a possible most recent
* click on the page in the content script environment, even though it's not the most recent
* click in the background script environment.
* @constant {number}
* @private
*/
const clickCacheExpiry = 5000;
/**
* The minimum time, in milliseconds, to wait after a tab is removed before expiring the cache
* of page visits in that tab for tab-based transition information and the cached opener tab
* for that tab.
* @constant {number}
* @private
*/
const tabRemovedExpiry = 10000;
/**
* A map where keys are tab IDs and values are objects with `openerTabId` and `timeStamp`
* properties.
* @constant {Map<number, Object>}
* @private
*/
const openerTabCache = new Map();
/**
* Send an update to the content script running on a page, called when a
* `webNavigation.onDOMContentLoaded` or `webNavigation.onHistoryStateUpdated`
* event fires.
* @param {Object} details - Details for the update to the content script.
* @param {number} details.tabId - The tab ID for the tab where the page is loading.
* @param {string} details.url - The URL for the page.
* @param {number} details.timeStamp - The timestamp for the page that is loading, either from
* `webNavigation.onDOMContentLoaded` or `webNavigation.onHistoryStateUpdated`, adjusted to
* the shared monotonic clock.
* @param {number} details.webNavigationTimeStamp - The timestamp for the page that is loading,
* either from `webNavigation.onDOMContentLoaded` or `webNavigation.onHistoryStateUpdated`.
* This timestamp, from the event, is on the system clock rather than the shared monotonic
* clock.
 * @param {string} details.transitionType - The transition type for the page that is loading, either from
 * `webNavigation.onDOMContentLoaded` or `webNavigation.onHistoryStateUpdated`.
* @param {string[]} details.transitionQualifiers - The transition qualifiers for the page
* that is loading, either from `webNavigation.onDOMContentLoaded` or
* `webNavigation.onHistoryStateUpdated`.
* @param {boolean} details.isHistoryChange - Whether the update was caused by
* `webNavigation.onDOMContentLoaded` (`false`) or `webNavigation.onHistoryStateUpdated`
* (`true`).
* @private
*/
function sendUpdateToContentScript({
tabId,
url,
timeStamp,
webNavigationTimeStamp,
transitionType,
transitionQualifiers,
isHistoryChange
}) {
// Retrieve cached page visits for this tab if this is not a History API change
let cachedPageVisitsForTab = { };
if(!isHistoryChange) {
cachedPageVisitsForTab = pageVisitTabCache.get(tabId);
}
// Get the cached opener tab details if this is not a History API change
let isOpenedTab = false;
let openerTabId = browser.tabs.TAB_ID_NONE;
let tabOpeningTimeStamp = 0;
if(!isHistoryChange) {
const openerTabDetails = openerTabCache.get(tabId);
// If there are cached opener tab details, expire the cache and swap in the cached page
// visits for the opener tab
if(openerTabDetails !== undefined) {
openerTabCache.delete(tabId);
isOpenedTab = true;
openerTabId = openerTabDetails.openerTabId;
tabOpeningTimeStamp = openerTabDetails.timeStamp;
cachedPageVisitsForTab = pageVisitTabCache.get(openerTabDetails.openerTabId);
}
}
// Send a message to the content script with transition information. The content script will
// merge this information with its local information to generate a PageTransitionData event.
messaging.sendMessageToTab(tabId, {
type: "webScience.pageTransition.backgroundScriptEventUpdate",
url,
timeStamp,
webNavigationTimeStamp,
transitionType,
transitionQualifiers,
isHistoryChange,
pageVisitTimeCache,
cachedPageVisitsForTab: (cachedPageVisitsForTab !== undefined) ? cachedPageVisitsForTab : { },
isOpenedTab,
openerTabId,
tabOpeningTimeStamp
});
// Remove stale page visits from the time-based transition cache, retaining the most recent page
// visit in any window and the most recent page visit in only non-private windows. We have to
// track the most recent non-private page separately, since a listener might only be registered
// for transitions involving non-private pages. We perform this expiration after sending a
// message to the content script, for the reasons explained in the pageManager.onPageVisitStart
// listener.
const nowTimeStamp = timing.now();
const expiredCachePageIds = new Set();
let mostRecentPageId = "";
let mostRecentPageVisitStartTime = 0;
let mostRecentNonPrivatePageId = "";
let mostRecentNonPrivatePageVisitStartTime = 0;
for(const cachePageId in pageVisitTimeCache) {
if(pageVisitTimeCache[cachePageId].pageVisitStartTime > mostRecentPageVisitStartTime) {
mostRecentPageId = cachePageId;
mostRecentPageVisitStartTime = pageVisitTimeCache[cachePageId].pageVisitStartTime;
}
if(!pageVisitTimeCache[cachePageId].privateWindow && (pageVisitTimeCache[cachePageId].pageVisitStartTime > mostRecentNonPrivatePageVisitStartTime)) {
mostRecentNonPrivatePageId = cachePageId;
mostRecentNonPrivatePageVisitStartTime = pageVisitTimeCache[cachePageId].pageVisitStartTime;
}
if((nowTimeStamp - pageVisitTimeCache[cachePageId].pageVisitStartTime) > pageVisitTimeCacheExpiry) {
expiredCachePageIds.add(cachePageId);
}
}
expiredCachePageIds.delete(mostRecentPageId);
expiredCachePageIds.delete(mostRecentNonPrivatePageId);
for(const expiredCachePageId of expiredCachePageIds) {
delete pageVisitTimeCache[expiredCachePageId];
}
// Remove stale page visits and clicks from the tab-based transition cache. We don't have to
// handle private and non-private windows separately, because if a tab precedes another tab
// we know they have the same private window status.
if(cachedPageVisitsForTab !== undefined) {
        // Expire stale pages, except for the most recent page if it's also stale
mostRecentPageId = "";
mostRecentPageVisitStartTime = 0;
expiredCachePageIds.clear();
for(const cachePageId in cachedPageVisitsForTab) {
if(cachedPageVisitsForTab[cachePageId].pageVisitStartTime > mostRecentPageVisitStartTime) {
mostRecentPageId = cachePageId;
mostRecentPageVisitStartTime = cachedPageVisitsForTab[cachePageId].pageVisitStartTime;
}
if((nowTimeStamp - cachedPageVisitsForTab[cachePageId].pageVisitStartTime) > pageVisitTabCacheExpiry) {
expiredCachePageIds.add(cachePageId);
}
}
expiredCachePageIds.delete(mostRecentPageId);
for(const expiredCachePageId of expiredCachePageIds) {
delete cachedPageVisitsForTab[expiredCachePageId];
}
// Expire stale clicks on the remaining pages, except for the most recent click if it's
// also stale
for(const cachePageId in cachedPageVisitsForTab) {
let mostRecentClickOnPage = 0;
const clickTimeStamps = [ ];
for(const clickTimeStamp of cachedPageVisitsForTab[cachePageId].clickTimeStamps) {
if((nowTimeStamp - clickTimeStamp) <= clickCacheExpiry) {
clickTimeStamps.push(clickTimeStamp);
}
mostRecentClickOnPage = Math.max(mostRecentClickOnPage, clickTimeStamp);
}
if((clickTimeStamps.length === 0) && (mostRecentClickOnPage > 0)) {
clickTimeStamps.push(mostRecentClickOnPage);
}
cachedPageVisitsForTab[cachePageId].clickTimeStamps = clickTimeStamps;
}
}
}
<|start_filename|>src/data/ampCachesAndViewers.js<|end_filename|>
/**
* An array of AMP cache domains, represented as domains and paths.
* This representation facilitates parsing AMP urls in `LinkResolution`.
* Note that the Cloudflare cache is deprecated, but we retain it for
* completeness.
* @constant {string[]}
* @see {@link https://cdn.ampproject.org/caches.json}
* @see {@link https://blog.cloudflare.com/announcing-amp-real-url/}
*/
export const ampCacheDomains = [
"amp.cloudflare.com", // Cloudflare AMP Cache
"cdn.ampproject.org", // Google AMP Cache
"www.bing-amp.com" // Microsoft Bing AMP Cache
];
/**
* An array of AMP viewers, represented as domains with paths.
* This representation facilitates parsing AMP urls in `LinkResolution`.
* @constant {string[]}
* @see {@link https://developers.google.com/search/docs/guides/about-amp}
*/
export const ampViewerDomainsAndPaths = [
"www.google.com/amp" // Google AMP Viewer
];
<|start_filename|>src/data/urlShortenersWithContent.js<|end_filename|>
/**
* An array of match patterns for known URL shorteners that also host
* content with the same public suffix + 1. We maintain this subset of
* the dataset of known URL shorteners so that we can correctly and
* efficiently handle links that initially appear to be self-links based
* on public suffix + 1, but that might resolve to a URL with a different
 * public suffix + 1.
 * @constant {string[]}
 */
export const urlShortenerWithContentMatchPatterns = [
"*://news.google.com/articles/*"
];
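// For example, a link from a https://news.google.com/ page to a URL matching
// the pattern above initially appears to be a self-link (the same public
// suffix + 1, google.com), but the article URL might resolve to a page with a
// completely different public suffix + 1, so link resolution should not be
// skipped for these links.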
<|start_filename|>src/userSurvey.js<|end_filename|>
/**
* A module to facilitate surveys of study participants. See the
* documentation for `setSurvey` for additional details about usage.
*
* ## User Experience
* * If the user has not been previously prompted for the survey,
* the survey will open in a new tab.
* * The study's browser action popup will contain either a page
* prompting the user to complete the survey (with options to open
* the survey or decline the survey), or a neutral page (if the
* user has already completed or declined the survey).
* * If the user has been previously prompted for the survey, and
* has not completed or declined the survey, the user will be
* reminded to complete the survey with a browser notification
* at a set interval.
*
* ## Limitations
* Note that this module is currently very limited: it only supports
* one survey at a time per study, with few options and a constrained design.
* We have not yet decided whether to build out this module or implement
* survey functionality in the Rally core add-on.
*
* @module userSurvey
*/
import * as id from "./id.js";
import * as timing from "./timing.js";
import * as storage from "./storage.js";
import * as messaging from "./messaging.js";
import * as permissions from "./permissions.js";
import popupPromptPage from "include:./browser-action-assets/userSurvey.popupPrompt.html";
import popupNoPromptPage from "include:./browser-action-assets/userSurvey.popupNoPrompt.html";
/**
* A persistent storage space for data about surveys.
* @type {storage.KeyValueStorage}
* @private
*/
let storageSpace = null;
/**
 * The ID of the survey reminder timeout, or null if there
 * is no such timeout.
* @type {number|null}
* @private
*/
let reminderTimeoutId = null;
/**
* Whether listeners for this module have already been registered.
* @type {boolean}
* @private
*/
let listenersRegistered = false;
/**
* When we last asked the user to do the survey, either with a browser
* notification or through opening a tab with the survey.
* @type {number}
* @private
*/
let lastSurveyRequest = 0;
/**
 * A fully-qualified URL to an icon file to use for reminding the
 * user with a notification to complete the survey, or null if there is
 * no such icon.
* @type {string|null}
* @private
*/
let reminderIconUrl = null;
/**
* How often, in seconds, to wait before reminding the user with a
* notification to participate in the survey.
* @type {number}
* @private
*/
let reminderInterval = 0;
/**
* The message to use for reminding the user with a notification to
* complete the survey.
* @type {string}
* @private
*/
let reminderMessage = "";
/**
* The title to use for reminding the user with a notification to
* complete the survey.
* @type {string}
* @private
*/
let reminderTitle = "";
/**
* The URL for the survey on an external platform
* (e.g., SurveyMonkey, Typeform, Qualtrics, etc.).
* @type {string}
* @private
*/
let surveyUrl = "";
const millisecondsPerSecond = 1000;
/**
* Opens the survey URL in a new browser tab, appending parameters
 * for the participant's survey ID (surveyId) and timezone offset
* (timezone).
* @private
*/
async function openSurveyInNewTab() {
const surveyId = await getSurveyId();
const surveyUrlObj = new URL(surveyUrl);
surveyUrlObj.searchParams.append("surveyId", surveyId);
surveyUrlObj.searchParams.append("timezone", new Date().getTimezoneOffset());
browser.tabs.create({
active: true,
url: surveyUrlObj.href
});
}
/**
* Set a timeout to remind the user to complete the study.
* @private
*/
function scheduleReminderForUser() {
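    // Fire the reminder once the reminder interval has elapsed since the last
    // survey request. For example, if the last request was 30 seconds ago and
    // reminderInterval is 60 seconds, the timeout fires in roughly 30 seconds;
    // if the reminder is already overdue, Math.max clamps the delay to 0 so
    // the reminder fires immediately.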
reminderTimeoutId = setTimeout(remindUser, Math.max((lastSurveyRequest + reminderInterval * millisecondsPerSecond) - timing.now(), 0));
}
/**
* Remind the user to complete the study, by prompting with a notification.
* @private
*/
async function remindUser() {
const surveyCompleted = await storageSpace.get("surveyCompleted");
const surveyCancelled = await storageSpace.get("surveyCancelled");
if (surveyCompleted || surveyCancelled) {
return;
}
lastSurveyRequest = timing.now();
await storageSpace.set("lastSurveyRequest", lastSurveyRequest);
browser.notifications.create({
type: "image",
message: reminderMessage,
title: reminderTitle,
iconUrl: reminderIconUrl
});
scheduleReminderForUser();
}
/**
* Set the browser action popup to the survey's no prompt page.
* @private
*/
function setPopupToNoPromptPage() {
browser.browserAction.setPopup({
popup: browser.runtime.getURL(popupNoPromptPage)
});
}
/**
* Initialize storage for the module.
* @private
*/
function initializeStorage() {
if (storageSpace === null) {
storageSpace = storage.createKeyValueStorage("webScience.userSurvey");
}
}
/**
* Listener for webRequest.onBeforeRequest when the URL is the survey
* completion URL. Sets surveyCompleted to true in storage and changes
* the browser action popup to the survey's no prompt page.
* @private
*/
function surveyCompletionUrlListener() {
storageSpace.set("surveyCompleted", true);
setPopupToNoPromptPage();
}
/**
* Prompt the user to respond to a survey. There can only be one current survey at a time.
*
* ##### Survey Behavior
* * If there is no current survey (i.e., if `setSurvey` was not previously called or
* `endSurvey` was called after `setSurvey`), this function creates a new current
* survey with the provided options, persists current survey details in storage, and
* configures survey UX.
* * If there is a current survey and `options.surveyName` matches the name of the
* current survey, this function continues the current survey with the details persisted
* in storage and configures survey UX.
* * If there is already a current survey and `options.surveyName` does not match the
* name of the current survey, throws an `Error` as there can only be one current survey
* at a time.
*
* ##### Single-Survey Studies
* If your study involves a single survey, call `setSurvey` when you first want to prompt
* the user to complete the survey, then call `setSurvey` with an identical survey name on
* subsequent extension startups to continue the survey.
*
* ##### Multi-Survey Studies
* If there is more than one survey in your study, you must call `endSurvey` for the current
* survey before calling `setSurvey` for the next survey.
*
* @param {Object} options - The options for the survey.
* @param {string} options.surveyName - A unique name for the survey within the study.
* @param {string} options.popupNoPromptMessage - A message to present to the
* user when there is no survey to prompt.
* @param {string} options.popupPromptMessage - A message to present to the user
* when there is a survey to prompt.
* @param {string} [options.popupIcon] - A path to an icon file, relative
 * to the study extension's root, to use for the browser action popup.
* This property is optional as the popup does not need to display an icon.
* @param {string} [options.reminderIcon] - A path to an icon file, relative
 * to the study extension's root, to use for reminding the user with a
* notification to complete the survey. This property is optional as the
* notification does not need to display an icon.
* @param {number} options.reminderInterval - How often, in seconds, to wait before
* reminding the user with a notification to participate in the survey.
* @param {string} options.reminderMessage - The message to use for reminding the
* user with a notification to complete the survey.
* @param {string} options.reminderTitle - The title to use for reminding the
* user with a notification to complete the survey.
* @param {string} options.surveyCompletionUrl - A URL that, when loaded,
* indicates the user has completed the survey.
* @param {string} options.surveyUrl - The URL for the survey on an external
* platform (e.g., SurveyMonkey, Typeform, Qualtrics, etc.).
*/
export async function setSurvey(options) {
permissions.check({
module: "webScience.userSurvey",
requiredPermissions: [ "notifications", "webRequest" ]
});
initializeStorage();
let surveyDetails = await storageSpace.get("surveyDetails");
// If there's no survey in storage, save the parameters in
// storage and carry out the survey based on the parameters.
// If options.surveyName differs from the survey name in storage,
// throw an error, because only one survey can be set at a time.
// Otherwise, options.surveyName is the same as the survey name in
// storage. In this case, use the survey attributes from storage.
if (!surveyDetails) {
surveyDetails = options;
await storageSpace.set("surveyDetails", options);
} else if (surveyDetails.surveyName !== options.surveyName) {
throw new Error("userSurvey only supports one survey at a time. Complete the survey that has previously been set.");
}
const currentTime = timing.now();
    ({ surveyUrl, reminderInterval, reminderTitle, reminderMessage } = surveyDetails);
browser.storage.local.set({
"webScience.userSurvey.popupPromptMessage": surveyDetails.popupPromptMessage
});
browser.storage.local.set({
"webScience.userSurvey.popupNoPromptMessage": surveyDetails.popupNoPromptMessage
});
reminderIconUrl = surveyDetails.reminderIcon ?
browser.runtime.getURL(surveyDetails.reminderIcon) : null;
browser.storage.local.set({
"webScience.userSurvey.popupIconUrl":
surveyDetails.popupIcon ? browser.runtime.getURL(surveyDetails.popupIcon) : null
});
// Check when we last asked the user to do the survey. If it's null,
// we've never asked, which means the extension just got installed.
// Open a tab with the survey, and save this time as the most recent
// request for participation.
lastSurveyRequest = await storageSpace.get("lastSurveyRequest");
const surveyCompleted = await storageSpace.get("surveyCompleted");
const surveyCancelled = await storageSpace.get("surveyCancelled");
// Configure the browser action popup page
    if (surveyCompleted || surveyCancelled) {
        setPopupToNoPromptPage();
        return;
    }
    browser.browserAction.setPopup({
        popup: browser.runtime.getURL(popupPromptPage)
    });
// If this is the first survey request, open the survey in a new tab.
if (lastSurveyRequest === null) {
lastSurveyRequest = currentTime;
await storageSpace.set("lastSurveyRequest", lastSurveyRequest);
// Since this is the first survey request, initialize the stored
// completed and cancelled state to false.
await storageSpace.set("surveyCompleted", false);
await storageSpace.set("surveyCancelled", false);
openSurveyInNewTab();
}
// Schedule a reminder for the user
scheduleReminderForUser();
// Set a listener for the survey completion URL.
browser.webRequest.onBeforeRequest.addListener(
surveyCompletionUrlListener,
{ urls: [ (new URL(surveyDetails.surveyCompletionUrl)).href + "*" ] }
);
// Listeners for cancel and open survey button click only need to be added once.
// They do not need to be added again for subsequent calls to setSurvey.
// These listeners do not need to be removed in endCurrentSurvey because they will
// not receive messages when the popup is the no prompt page.
if (!listenersRegistered) {
// Set listeners for cancel and open survey button clicks in the survey request.
messaging.onMessage.addListener(() => {
storageSpace.set("surveyCancelled", true);
setPopupToNoPromptPage();
browser.webRequest.onBeforeRequest.removeListener(surveyCompletionUrlListener);
}, { type: "webScience.userSurvey.cancelSurvey" });
messaging.onMessage.addListener(() => {
openSurveyInNewTab();
}, { type: "webScience.userSurvey.openSurvey" });
}
listenersRegistered = true;
}
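// Illustrative usage sketch (not part of the module): setting a survey with
// hypothetical options. All values below are assumptions for demonstration.
//
// await setSurvey({
//     surveyName: "onboardingSurvey",
//     popupNoPromptMessage: "There is no survey available right now.",
//     popupPromptMessage: "Please take our short survey.",
//     reminderIcon: "icons/survey.png",
//     reminderInterval: 60 * 60 * 24, // Remind daily (the interval is in seconds)
//     reminderMessage: "We'd appreciate your feedback.",
//     reminderTitle: "Survey reminder",
//     surveyCompletionUrl: "https://survey.example.com/complete",
//     surveyUrl: "https://survey.example.com/start"
// });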
/**
* Each study participant has a persistent survey ID, generated with
* the `id` module. The ID is automatically added as a parameter to
* the survey URL, enabling researchers to import survey data from an
* external platform and sync it with Rally data. This method returns the
* survey ID, generating it if it does not already exist.
* @returns {Promise<string>} - The participant's survey ID.
*/
export async function getSurveyId() {
initializeStorage();
let surveyId = await storageSpace.get("surveyId");
if (surveyId === null) {
surveyId = id.generateId();
await storageSpace.set("surveyId", surveyId);
}
return surveyId;
}
/**
* Gets the status of the current survey. Can be used if a
* subsequent survey depends on the status of the previous survey.
* @returns {Promise<string>|Promise<null>} - The status of the current
* survey ("completed", "cancelled", or "active"), or null if there is no
* current survey.
*/
export async function getSurveyStatus() {
initializeStorage();
const surveyDetails = await storageSpace.get("surveyDetails");
const surveyCompleted = await storageSpace.get("surveyCompleted");
const surveyCancelled = await storageSpace.get("surveyCancelled");
if (!surveyDetails) {
return null;
} else if(surveyCompleted) {
return "completed";
} else if(surveyCancelled) {
return "cancelled";
} else {
return "active";
}
}
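// Illustrative sketch (not part of the module) of a multi-survey study: since
// only one survey can be current at a time, check the status of the current
// survey and end it before setting a follow-up survey. The survey name and the
// followUpSurveyOptions object below are hypothetical.
//
// if ((await getSurveyName()) === "onboardingSurvey"
//     && (await getSurveyStatus()) === "completed") {
//     await endSurvey();
//     await setSurvey(followUpSurveyOptions);
// }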
/**
* Gets the name of the current survey.
* @returns {Promise<string>|Promise<null>} - The name of the current survey. Returns null
* if there is no current survey.
*/
export async function getSurveyName() {
initializeStorage();
const surveyDetails = await storageSpace.get("surveyDetails");
return surveyDetails ? surveyDetails.surveyName : null;
}
/**
* End the current survey. If there is a current survey, you must call
* this function before starting a new survey.
* @returns {Promise} A Promise that resolves when the survey has been
* ended.
*/
export async function endSurvey() {
// Stop prompting for the survey.
setPopupToNoPromptPage();
    // If there is an existing survey reminder timeout, clear the timeout.
clearTimeout(reminderTimeoutId);
// Remove any previously added listener for browser.webRequest.onBeforeRequest
// that checks for the survey completion URL.
browser.webRequest.onBeforeRequest.removeListener(surveyCompletionUrlListener);
initializeStorage();
    // Clear the data in storage for the current survey.
await storageSpace.set("lastSurveyRequest", null);
await storageSpace.set("surveyCompleted", false);
await storageSpace.set("surveyCancelled", false);
await storageSpace.set("surveyDetails", null);
}
<|start_filename|>src/id.js<|end_filename|>
/**
* This module provides functionality for generating random identifiers.
* Studies can use these identifiers to uniquely label events and other
* items of interest.
* @module id
*/
import { v4 as uuidv4 } from 'uuid';
/**
* Generate a random (v4) UUID, consistent with RFC4122. These values
* include 122 bits of cryptographic randomness.
* @returns {string} The new UUID.
*/
export function generateId() {
return uuidv4();
}
<|start_filename|>src/pageText.js<|end_filename|>
/**
* This module enables analyzing the text content of webpages, including with
* natural language processing methods. The module uses Mozilla Readability
* in a content script to parse document title and content when possible.
*
* ## Training, Testing, and Deploying Natural Language Processing Models
* A motivating use case for this module is applying natural language
* processing methods to webpage text. The module provides infrastructure for
* NLP models, but leaves implementation and evaluation of models to study
* authors. We recommend using existing toolkits for NLP feature generation
* (e.g., Natural or NLP.js) and for working with models (e.g., TensorFlow.js,
* ONNX.js, WebDNN, or sklearn-porter). We also recommend using the same
* codebase for collecting data (e.g., with web crawls), constructing models,
* evaluating models, and deploying models in browser-based studies. When
* maintaining multiple NLP codebases for a browser-based study, subtle
* inconsistencies are easy to introduce and can call into question NLP model
* performance.
*
* ## Web Crawls to Collect Natural Language Processing Training Data
* Because WebScience integrates with ordinary browser extensions, you can
* use this module in a web crawl to collect page text content as NLP training
* data. All the major browser automation toolkits (e.g., Selenium, Puppeteer,
* Playwright, and WebdriverIO) support running web crawls with browser
* extensions installed. We recommend running an online crawl to collect NLP
* data, using this module to extract webpage text, then training and testing
* models offline. If you use web crawl data to construct an NLP model for a
* browser-based study, be sure to carefully consider how the distribution
* of pages in the crawl compares to the distribution of pages that a user in
* the study might visit. If a crawl is not representative of user browsing,
* NLP model performance on crawl data might significantly differ from
* performance when deployed in a browser-based study.
*
* ## Implementing Natural Language Processing in Web Workers
* Because natural language processing methods can be computationally
* expensive, it is very important to offload NLP tasks from an extension's
* main thread. We recommend pairing this module with the `workers` module to
* implement NLP tasks inside of Web Workers, which run in separate threads
* and will not block the extension's main thread. Some NLP toolkits support
* additional optimizations, such as WebAssembly or WebGL, and we recommend
* enabling all available optimizations to minimize the possibility of impact
* on the user's browsing experience.
*
* @see {@link https://github.com/mozilla/readability}
* @see {@link https://github.com/NaturalNode/natural}
* @see {@link https://github.com/axa-group/nlp.js}
* @see {@link https://www.tensorflow.org/js}
* @see {@link https://github.com/microsoft/onnxjs}
* @see {@link https://mil-tokyo.github.io/webdnn/}
* @see {@link https://github.com/nok/sklearn-porter}
* @module pageText
*/
import * as messaging from "./messaging.js";
import * as matching from "./matching.js";
import * as events from "./events.js";
import * as pageManager from "./pageManager.js";
import * as permissions from "./permissions.js";
import pageTextContentScript from "include:./content-scripts/pageText.content.js";
/**
* A listener for the `onTextParsed` event.
* @callback textParsedListener
* @memberof module:pageText.onTextParsed
* @param {Object} details - Additional information about the page data event.
* @param {string} details.pageId - The ID for the page, unique across browsing sessions.
* @param {string} details.url - The URL of the page, without any hash.
* @param {string} details.title - The title of the document, parsed by Readability.
* @param {string} details.content - The document text content as an HTML string, parsed by Readability.
* @param {string} details.textContent - The document text content with HTML tags removed, parsed by Readability.
* @param {boolean} details.privateWindow - Whether the page loaded in a private window.
*/
/**
* @typedef {Object} TextParsedListenerRecord
* @property {matching.MatchPatternSet} matchPatternSet - The match patterns for the listener.
* @property {boolean} privateWindows - Whether to notify the listener about pages in private windows.
* @property {browser.contentScripts.RegisteredContentScript} contentScript - The content
* script associated with the listener.
* @private
*/
/**
* A map where each key is a listener and each value is a record for that listener.
* @constant {Map<textParsedListener, TextParsedListenerRecord>}
* @private
*/
const textParsedListeners = new Map();
/**
* Add a listener for the `onTextParsed` event.
* @function addListener
* @memberof module:pageText.onTextParsed
* @param {textParsedListener} listener - The listener to add.
* @param {Object} options - Options for the listener.
* @param {string[]} options.matchPatterns - The webpages where the listener should be notified about page text.
* @param {boolean} [options.privateWindows=false] - Whether to notify the listener about pages in private windows.
*/
/**
* Remove a listener for the `onTextParsed` event.
* @function removeListener
* @memberof module:pageText.onTextParsed
* @param {textParsedListener} listener - The listener to remove.
*/
/**
* Whether a specified listener has been added for the `onTextParsed` event.
* @function hasListener
* @memberof module:pageText.onTextParsed
* @param {textParsedListener} listener - The listener to check.
* @returns {boolean} Whether the listener has been added for the event.
*/
/**
* Whether the `onTextParsed` event has any listeners.
* @function hasAnyListeners
* @memberof module:pageText.onTextParsed
* @returns {boolean} Whether the event has any listeners.
*/
/**
* An event that fires when a page's text content has been parsed with Readability. If the text
* content is not parseable, this event does not fire.
* @namespace
*/
export const onTextParsed = events.createEvent({
name: "webScience.pageText.onTextParsed",
addListenerCallback: addListener,
removeListenerCallback: removeListener,
notifyListenersCallback: () => { return false; }
});
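// Illustrative usage sketch (not part of the module): registering a listener
// for onTextParsed that hands parsed text to a hypothetical Web Worker for
// NLP feature generation, as recommended above. The match pattern, worker
// script name, and message format are assumptions for demonstration only.
//
// const nlpWorker = new Worker(browser.runtime.getURL("nlpWorker.js"));
// onTextParsed.addListener(details => {
//     nlpWorker.postMessage({
//         pageId: details.pageId,
//         textContent: details.textContent
//     });
// }, { matchPatterns: [ "*://*.example.com/*" ] });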
/**
* Whether the module has completed initialization.
* @type {boolean}
* @private
*/
let initialized = false;
/**
* A callback function for adding a text parsed listener. The options for this private function must
* be kept in sync with the options for the public `onTextParsed.addListener` function.
* @param {textParsedListener} listener - The listener being added.
* @param {Object} options - Options for the listener.
* @param {string[]} options.matchPatterns - The match patterns for pages where the listener should
* be notified.
* @param {boolean} [options.privateWindows=false] - Whether the listener should be notified for
* pages in private windows.
* @private
*/
async function addListener(listener, {
matchPatterns,
privateWindows = false
}) {
// Initialization
if (!initialized) {
initialized = true;
await pageManager.initialize();
// Listen for content script messages
messaging.onMessage.addListener(textParsedDetails => {
// Remove the type string from the content script message
delete textParsedDetails.type;
// Notify listeners when the private window and match pattern requirements are met
for (const [listener, listenerRecord] of textParsedListeners) {
if ((!textParsedDetails.privateWindow || listenerRecord.privateWindows)
&& (listenerRecord.matchPatternSet.matches(textParsedDetails.url))) {
listener(textParsedDetails);
}
}
},
{
type: "webScience.pageText.parsedText",
schema: {
pageId: "string",
url: "string",
title: "string",
content: "string",
textContent: "string",
privateWindow: "boolean"
}
});
// Notify the content script when there is a new Readability status
// for a page and the page URL matches at least one listener
messaging.registerSchema("webScience.pageText.isArticle", {
isArticle: "boolean"
});
browser.tabs.onUpdated.addListener((tabId, changeInfo, tab) => {
if("isArticle" in changeInfo && "url" in tab) {
// Test match patterns here rather than in the tabs.onUpdated
// listener options so we don't have to manage multiple listeners
// or remove and add the listener while events might be queued
for (const listenerRecord of textParsedListeners.values()) {
if (listenerRecord.matchPatternSet.matches(tab.url)) {
messaging.sendMessageToTab(tabId, {
type: "webScience.pageText.isArticle",
isArticle: tab.isArticle
});
break;
}
}
}
}, {
urls: permissions.getManifestOriginMatchPatterns(),
properties: [ "isArticle" ]
});
}
// Compile the match patterns for the listener
const matchPatternSet = matching.createMatchPatternSet(matchPatterns);
// Register a content script for the listener
const contentScript = await browser.contentScripts.register({
matches: matchPatterns,
js: [{
file: pageTextContentScript
}],
runAt: "document_idle"
});
// Store a record for the listener
textParsedListeners.set(listener, {
matchPatternSet,
contentScript,
privateWindows
});
}
/**
* A callback function for removing a text parsed listener.
* @param {textParsedListener} listener - The listener that is being removed.
* @private
*/
function removeListener(listener) {
// If there is a record of the listener, unregister its content script
// and delete the record
const listenerRecord = textParsedListeners.get(listener);
if (listenerRecord === undefined) {
return;
}
listenerRecord.contentScript.unregister();
textParsedListeners.delete(listener);
}
<|start_filename|>src/content-scripts/linkResolution.googleNews.content.js<|end_filename|>
/**
* Content script for the `linkResolution` module that parses links from Google News pages.
* This parsing is fragile and, by design, degrades gracefully to resolving links with
* HTTP requests.
* @module linkResolution.googleNews.content
*/
import { Base64 } from "js-base64";
function pageManagerLoaded() {
const pageManager = window.webScience.pageManager;
/**
* How often, in milliseconds, to tick the timer for checking links when the page has attention.
* @constant {number}
*/
const timerInterval = 500;
/**
* The anchor elements that have already been checked on the page.
* @type {WeakSet<HTMLAnchorElement>}
*/
let checkedAnchorElements = new WeakSet();
/**
* The timeout ID for timer ticks when the page has attention.
* @type {number}
*/
let timeoutID = -1;
/**
* Whether the page is currently between page visit start and
* page visit stop events.
*/
let inPageVisit = false;
/**
* A listener for pageManager.onPageVisitStart that resets
* page variables and starts the timer ticking if the page
* has attention.
*/
function pageVisitStartListener() {
checkedAnchorElements = new WeakSet();
timeoutID = -1;
inPageVisit = true;
if(pageManager.pageHasAttention) {
timerTick();
}
}
pageManager.onPageVisitStart.addListener(pageVisitStartListener);
if(pageManager.pageVisitStarted) {
pageVisitStartListener();
}
/**
* A listener for pageManager.onPageVisitStop that
* clears the ticking timer.
*/
function pageVisitStopListener() {
clearTimeout(timeoutID);
}
pageManager.onPageVisitStop.addListener(pageVisitStopListener);
/**
* A listener for pageManager.onPageAttentionUpdate that
* clears the ticking timer if the page loses attention
* and starts the ticking timer if the page gains
* attention.
*/
function pageAttentionUpdateListener() {
// Ignore attention events when we aren't between page visit
// start and page visit stop events
if(!inPageVisit) {
return;
}
if(!pageManager.pageHasAttention) {
            clearTimeout(timeoutID);
}
else {
timerTick();
}
}
pageManager.onPageAttentionUpdate.addListener(pageAttentionUpdateListener);
/**
* When the timer ticks, check all the anchor elements in the document that haven't already been
* checked.
*/
function timerTick() {
const urlMappings = [ ];
// Iterate through all the anchor elements in the document. We don't specify Google News
// article URLs in the CSS selector because the anchor element href could be relative
// (with various formats) or absolute.
const anchorElements = document.querySelectorAll("a[href]");
for(const anchorElement of anchorElements) {
try {
// Ignore links that we've already checked
if(checkedAnchorElements.has(anchorElement)) {
continue;
}
// If this is a Google News article link, try to parse a URL mapping
const urlObj = new URL(anchorElement.href, window.location.href);
if((urlObj.hostname === "news.google.com") && urlObj.pathname.startsWith("/articles/")) {
const destinationUrl = parseDestinationUrl(anchorElement);
if(destinationUrl !== null) {
// If we were able to parse a URL mapping, ignore the anchor element in future.
// Since Google can dynamically add jslog attributes, we might need to check an
// element multiple times.
checkedAnchorElements.add(anchorElement);
urlMappings.push({
sourceUrl: urlObj.href,
destinationUrl,
ignoreSourceUrlParameters: true
});
}
}
}
catch {
continue;
}
}
// Notify the background script
if(urlMappings.length > 0) {
browser.runtime.sendMessage({
type: "webScience.linkResolution.registerUrlMappings",
pageId: pageManager.pageId,
urlMappings
});
}
// If the page has attention, set another timer tick
if(pageManager.pageHasAttention) {
timeoutID = setTimeout(timerTick, timerInterval);
}
}
/**
* Attempt to parse the destination URL from an anchor element where the href
* is a Google News article link. This function relies on the `jslog` attribute
* of the anchor element or a parent article tag.
* @param {HTMLAnchorElement} anchorElement - The anchor element.
* @returns {string|null} The parsed destination URL, or null if parsing was not
* successful.
*/
function parseDestinationUrl(anchorElement) {
const elements = new Set([ anchorElement ]);
// Consider the parent element if it's an article tag, since previously
// jslog was set on that element instead of the anchor element
if(anchorElement.parentElement.tagName === "ARTICLE") {
elements.add(anchorElement.parentElement);
}
for(const element of elements) {
// The destination URL is typically available in a jslog attribute,
// which is a list of properties separated by "; ". When the URL has
// a "2:" prepended, it's just the raw URL. When the URL has a "5:"
// prepended, it's an array encoded with Base64 where one entry is
// the URL. The URL can have unicode characters encoded.
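            // For instance, a hypothetical attribute value of
            // "85008; 2:https://example.com/news/story; track:click" would yield
            // the destination URL "https://example.com/news/story" via its "2:" token.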
const jsLogAttributeValue = element.getAttribute("jslog");
if(jsLogAttributeValue === null) {
continue;
}
const jsLogTokens = jsLogAttributeValue.split("; ");
for (const jsLogToken of jsLogTokens) {
if(jsLogToken.startsWith("2:")) {
try {
const urlObj = new URL(decodeURIComponent(jsLogToken.substring(2)));
return urlObj.href;
}
catch {
continue;
}
}
else if(jsLogToken.startsWith("5:")) {
try {
// We have to use a third-party Base64 decoder rather than the built-in
// atob function because the string might include encoded Unicode
// characters, which cause an error in atob.
const decodedJsLog = Base64.decode(jsLogToken.substring(2));
// Quotation marks might be escaped with a \ in the URL, so unescape them.
const unescapedDecodedJsLog = decodedJsLog.replaceAll(`\\"`, `"`);
const values = JSON.parse(`{ "values": ${unescapedDecodedJsLog} }`).values;
if(!Array.isArray(values)) {
continue;
}
for(const value of values) {
if(typeof value === "string") {
const urlObj = new URL(decodeURIComponent(value));
return urlObj.href;
}
}
}
catch {
continue;
}
}
}
}
return null;
}
}
// Wait for pageManager load
if (("webScience" in window) && ("pageManager" in window.webScience)) {
pageManagerLoaded();
}
else {
if(!("pageManagerHasLoaded" in window))
window.pageManagerHasLoaded = [];
window.pageManagerHasLoaded.push(pageManagerLoaded);
}
<|start_filename|>.eslintrc.js<|end_filename|>
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
module.exports = {
env: {
browser: true,
es6: true,
node: true,
webextensions: true,
},
extends: [
"eslint:recommended",
"plugin:import/warnings",
"plugin:node/recommended",
],
overrides: [
{
files: "tests/**",
env: {
mocha: true,
},
extends: [
"plugin:mocha/recommended",
],
},
],
globals: {
ChromeUtils: false,
ExtensionAPI: false,
__ENABLE_DEVELOPER_MODE__: false,
},
parserOptions: {
ecmaVersion: 2021,
sourceType: "module",
},
plugins: [
"import",
"node",
"mocha"
],
root: true,
rules: {
"node/no-deprecated-api": "error",
"node/no-extraneous-require": "off",
"node/no-missing-import": "off",
"node/no-unpublished-import": "off",
"node/no-unpublished-require": "off",
"node/no-unsupported-features/es-syntax": "off",
"no-multi-spaces": "error",
"no-unused-vars": [ "error", { vars: "all", args: "none", ignoreRestSiblings: false } ],
"no-var": "warn",
"prefer-const": "warn",
},
};
<|start_filename|>src/browser-action-assets/userSurvey.popupPrompt.js<|end_filename|>
(async function() {
// Load the message from storage
const messageStorageKey = "webScience.userSurvey.popupPromptMessage";
const messageResults = await browser.storage.local.get(messageStorageKey);
if(messageResults[messageStorageKey]) {
const contentElement = document.getElementById("content");
contentElement.textContent = messageResults[messageStorageKey];
}
// Load icon URL from storage
const iconUrlStorageKey = "webScience.userSurvey.popupIconUrl";
const iconUrlResults = await browser.storage.local.get(iconUrlStorageKey);
if(iconUrlResults[iconUrlStorageKey]) {
const iconElement = document.getElementById("icon");
iconElement.src = iconUrlResults[iconUrlStorageKey];
iconElement.style.display = "block";
}
// Listen for clicks on the buttons
document.addEventListener("click", async (e) => {
if (e.target.name === "agree") {
await browser.runtime.sendMessage({ type: "webScience.userSurvey.openSurvey" });
}
else if (e.target.name === "later") {
window.close();
}
else if (e.target.name === "never") {
await browser.runtime.sendMessage({ type: "webScience.userSurvey.cancelSurvey" });
window.close();
}
});
})();
<|start_filename|>src/linkResolution.js<|end_filename|>
/**
* This module provides functionality for resolving shortened and shimmed URLs.
* @module linkResolution
*/
import * as matching from "./matching.js";
import * as permissions from "./permissions.js";
import * as id from "./id.js";
import * as pageManager from "./pageManager.js";
import * as messaging from "./messaging.js";
import { urlShortenerMatchPatterns } from "./data/urlShorteners.js";
import { ampCacheDomains, ampViewerDomainsAndPaths } from "./data/ampCachesAndViewers.js";
import { parse as tldtsParse } from "tldts";
import linkResolutionTwitterContentScript from "include:./content-scripts/linkResolution.twitter.content.js";
import linkResolutionGoogleNewsContentScript from "include:./content-scripts/linkResolution.googleNews.content.js";
// AMP caches and viewers
/**
* A RegExp that matches and parses AMP cache and viewer URLs. If there is a match, the RegExp provides several
* named capture groups.
* * AMP Cache Matches
* * `ampCacheSubdomain` - The subdomain, which should be either a reformatted version of the
* URL domain or a hash of the domain. If there is no subdomain, this capture group
* is `undefined`.
* * `ampCacheDomain` - The domain for the AMP cache.
* * `ampCacheContentType` - The content type, which is either `c` for an HTML document, `i` for
* an image, or `r` for another resource.
* * `ampCacheIsSecure` - Whether the AMP cache loads the resource via HTTPS. If it does, this
* capture group has the value `s/`. If it doesn't, this capture group is `undefined`.
* * `ampCacheUrl` - The underlying URL, without a specified scheme (i.e., `http://` or `https://`).
* * AMP Viewer Matches
* * `ampViewerDomainAndPath` - The domain and path for the AMP viewer.
* * `ampViewerUrl` - The underlying URL, without a specified scheme (i.e., `http://` or `https://`).
* @see {@link https://developers.google.com/amp/cache/overview}
* @see {@link https://amp.dev/documentation/guides-and-tutorials/learn/amp-caches-and-cors/amp-cache-urls/}
* @constant {RegExp}
*/
export const ampRegExp = /*@__PURE__*/(() => new RegExp(
// AMP cache regular expression
`(?:^https?://(?:(?<ampCacheSubdomain>[a-zA-Z0-9\\-\\.]*)\\.)?(?<ampCacheDomain>${ampCacheDomains.map(matching.escapeRegExpString).join("|")})/(?<ampCacheContentType>c|i|r)/(?<ampCacheIsSecure>s/)?(?<ampCacheUrl>.*)$)`
+ `|` +
// AMP viewer regular expression
`(?:^https?://(?<ampViewerDomainAndPath>${ampViewerDomainsAndPaths.map(matching.escapeRegExpString).join("|")})/(?<ampViewerUrl>.*)$)`
, "i"))();
/**
* A MatchPatternSet for AMP caches and viewers.
* @constant {matching.MatchPatternSet}
*/
export const ampMatchPatternSet = /*@__PURE__*/(() => matching.createMatchPatternSet(
matching.domainsToMatchPatterns(ampCacheDomains, false).concat(
ampViewerDomainsAndPaths.map(ampViewerDomainAndPath => `*://${ampViewerDomainAndPath}*`))))();
/**
* Parse the underlying URL from an AMP cache or viewer URL, if the URL is an AMP cache or viewer URL.
* @param {string} url - A URL that may be an AMP cache or viewer URL.
* @returns {string} If the URL is an AMP cache or viewer URL, the parsed underlying URL. Otherwise, just the URL.
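 * @example
 * // A sketch assuming cdn.ampproject.org is among the bundled AMP cache domains;
 * // the subdomain and path are hypothetical.
 * // returns "https://www.example.com/amp/article.html"
 * parseAmpUrl("https://www-example-com.cdn.ampproject.org/c/s/www.example.com/amp/article.html");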
*/
export function parseAmpUrl(url) {
if(!ampRegExp.test(url))
return url;
const parsedAmpUrl = ampRegExp.exec(url);
// Reconstruct AMP cache URLs
if(parsedAmpUrl.groups.ampCacheUrl !== undefined)
return "http" +
            // The ampCacheIsSecure capture group is "s/" when present and undefined otherwise
            ((parsedAmpUrl.groups.ampCacheIsSecure !== undefined) ? "s" : "") +
"://" +
parsedAmpUrl.groups.ampCacheUrl;
// Reconstruct AMP viewer URLs, assuming the protocol is HTTPS
return "https://" + parsedAmpUrl.groups.ampViewerUrl;
}
// Facebook link shims
/**
* A RegExp for matching URLs that have had Facebook's link shim applied.
* @constant {RegExp}
*/
export const facebookLinkShimRegExp = /^https?:\/\/l\.facebook\.com\/l\.php\?u=/;
/**
* Parse a URL from Facebook's link shim, if the shim was applied to the URL.
* @param {string} url - A URL that may have Facebook's link shim applied.
* @returns {string} If Facebook's link shim was applied to the URL, the unshimmed URL. Otherwise, just the URL.
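 * @example
 * // A sketch with a hypothetical shimmed URL.
 * // returns "https://www.example.com/"
 * parseFacebookLinkShim("https://l.facebook.com/l.php?u=https%3A%2F%2Fwww.example.com%2F");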
*/
export function parseFacebookLinkShim(url) {
if(!facebookLinkShimRegExp.test(url))
return url;
// Extract the original URL from the "u" parameter
const urlObject = new URL(url);
    const uParamValue = urlObject.searchParams.get("u");
if(uParamValue === null)
return url;
return uParamValue;
}
/**
 * Remove Facebook link decoration (the `fbclid` parameter) from a URL, if present.
* @param {string} url - A URL that may have Facebook link decoration.
* @returns {string} The URL without Facebook link decoration.
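 * @example
 * // The fbclid value is hypothetical.
 * // returns "https://www.example.com/"
 * removeFacebookLinkDecoration("https://www.example.com/?fbclid=IwAR0AbCdEf");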
*/
export function removeFacebookLinkDecoration(url) {
const urlObj = new URL(url);
urlObj.searchParams.delete("fbclid");
return urlObj.href;
}
// URL shorteners
/**
 * An array of match patterns for known URL shorteners, loaded from `data/urlShorteners.js`.
* @constant {string[]}
*/
export { urlShortenerMatchPatterns };
/**
 * A RegExp for known URL shorteners, based on the match patterns loaded from `data/urlShorteners.js`.
* @constant {RegExp}
*/
export const urlShortenerRegExp = matching.matchPatternsToRegExp(urlShortenerMatchPatterns);
/**
 * A matching.MatchPatternSet for known URL shorteners, based on the match patterns loaded from `data/urlShorteners.js`.
* @constant {matching.MatchPatternSet}
*/
export const urlShortenerMatchPatternSet = matching.createMatchPatternSet(urlShortenerMatchPatterns);
// Public suffix + 1
/**
* Extracts the public suffix + 1 from a URL.
* @param {string} url - The URL.
* @returns {string} The public suffix + 1.
* @example <caption>Example usage of urlToPS1.</caption>
* // returns "mozilla.org"
* urlToPS1("https://www.mozilla.org/");
*/
export function urlToPS1(url) {
return tldtsParse((new URL(url)).hostname).domain;
}
// URL resolution
/**
* The timeout (in ms) for fetch when resolving a link.
* @constant {number}
* @private
* @default
*/
const fetchTimeout = 5000;
/**
* The maximum number of redirects to follow with fetch when resolving a link.
* @constant {number}
* @private
* @default
*/
const fetchMaxRedirects = 3;
/**
* Whether the module has been initialized.
* @type {boolean}
* @private
*/
let initialized = false;
/**
* A map where each key is a webRequest requestId and each value is a link resolution ID.
* @constant {Map<string, string>}
* @private
*/
const requestIdToLinkResolutionId = new Map();
/**
* @typedef {Object} LinkResolutionData
* @property {Function} resolve - The resolve function for the link resolution Promise.
* @property {Function} reject - The reject function for the link resolution Promise.
* @property {number} timeoutId - The timeout ID for the link resolution fetch.
* @property {string} requestId - The webRequest requestId for the link resolution fetch.
* @property {number} redirects - The number of redirects in the link resolution fetch.
* @property {boolean} parseAmpUrl - If the resolved URL is an AMP URL, parse it.
* @property {boolean} parseFacebookLinkShim - If the resolved URL has a Facebook shim applied, parse it.
* @property {boolean} removeFacebookLinkDecoration - If the resolved URL has Facebook link decoration, remove it.
* @property {boolean} onlyRequestKnownUrlShorteners - If the resolution should only issue HTTP requests for
* known URL shorteners, and should treat all other origins as resolved (i.e., if a known shortener has a 3xx
* redirect to an origin that isn't a known shortener, treat that redirection target as the resolved URL). When
* this value is false, resolution will follow all redirects until either loading completes, the redirect limit is
* reached, or there is an error.
* @private
*/
/**
 * A map where each key is a link resolution ID (randomly generated) and each value is the
 * data for that link resolution, including functions for settling its Promise.
* @constant {Map<string, LinkResolutionData>}
* @private
*/
const linkResolutionIdToData = new Map();
/**
* A special HTTP header name to use for associating a link resolution ID with an outbound
* request.
* @constant {string}
* @private
* @default
*/
const httpHeaderName = "X-WebScience-LinkResolution-ID";
/**
* Resolve a shortened or shimmed URL to an original URL, by recursively resolving the URL and removing shims.
* @param {string} url - The URL to resolve.
* @param {Object} [options] - Options for resolving the URL.
* @param {boolean} [options.parseAmpUrl=true] - If the resolved URL or the original URL is an AMP URL, parse it. See
 * `parseAmpUrl` for details.
* @param {boolean} [options.parseFacebookLinkShim=true] - If the resolved URL or the original URL has a Facebook shim
 * applied, parse it. See `parseFacebookLinkShim` for details.
* @param {boolean} [options.removeFacebookLinkDecoration=true] - If the resolved URL or the original URL has Facebook link
* decoration, remove it. See `removeFacebookLinkDecoration` for details.
* @param {boolean} [options.applyRegisteredUrlMappings=true] - If the original URL matches a registered URL mapping, apply
* the mapping. See `registerUrlMappings` for details.
* @param {string} [options.request="known_shorteners"] - Whether to issue HTTP requests to resolve the URL,
* following HTTP 3xx redirects. Valid values are "always", "known_shorteners" (only issue a request if the original URL or
* a redirection target URL matches a known URL shortener), and "never". Note that setting this value to "always" could have
* performance implications, since it requires completely loading the destination URL.
* @returns {Promise<string>} - A Promise that either resolves to the original URL or is rejected with an error.
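 * @example
 * // A usage sketch with a hypothetical shortened URL. The Promise resolves to the
 * // destination URL or rejects with an error message.
 * webScience.linkResolution.resolveUrl("https://bit.ly/example")
 *     .then(resolvedUrl => console.log(resolvedUrl))
 *     .catch(errorMessage => console.error(errorMessage));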
*/
export function resolveUrl(url, options) {
    // Use a pre-ES6 approach to default argument values, to avoid ambiguity between the option names and the module's function names
    if(typeof options !== "object") {
options = { };
}
options.parseAmpUrl = "parseAmpUrl" in options ? options.parseAmpUrl : true;
options.parseFacebookLinkShim = "parseFacebookLinkShim" in options ? options.parseFacebookLinkShim : true;
options.removeFacebookLinkDecoration = "removeFacebookLinkDecoration" in options ? options.removeFacebookLinkDecoration : true;
options.applyRegisteredUrlMappings = "applyRegisteredUrlMappings" in options ? options.applyRegisteredUrlMappings : true;
options.request = "request" in options ? options.request : "known_shorteners";
initialize();
if(options.parseAmpUrl) {
url = parseAmpUrl(url);
}
if(options.parseFacebookLinkShim) {
url = parseFacebookLinkShim(url);
}
if(options.removeFacebookLinkDecoration) {
url = removeFacebookLinkDecoration(url);
}
if(options.applyRegisteredUrlMappings) {
url = applyRegisteredUrlMappings(url);
}
// If we don't need to resolve the URL, just return the current URL value in a Promise
if((options.request === "never") ||
((options.request === "known_shorteners") && !urlShortenerMatchPatternSet.matches(url))) {
return Promise.resolve(url);
}
// Resolve the URL
// The webRequest API tracks HTTP request lifecycle with a unique requestId value, which we
// can match to this link resolution by generating a random link resolution ID, inserting the
// link resolution ID as a special HTTP header when fetching the link, observing HTTP headers
// with webRequest to match the link resolution ID to a webRequest requestId, then removing
// the special HTTP header before the request is sent
const linkResolutionId = id.generateId();
const controller = new AbortController();
const init = {
signal: controller.signal,
// Don't include cookies or a User-Agent with the request, because they can cause shorteners
// to provide HTML/JS redirects rather than HTTP redirects
credentials: "omit",
headers: {
"User-Agent": "",
[httpHeaderName]: linkResolutionId
}
};
// Special Cases
const urlObj = new URL(url);
// Twitter (t.co)
// Removing the amp=1 parameter results in more reliable HTTP redirects instead of HTML/JS redirects
if(urlObj.hostname === "t.co") {
urlObj.searchParams.delete("amp");
}
// Google News (news.google.com)
// Setting the User-Agent to curl results in more reliable HTTP redirects instead of HTML/JS redirects
if(urlObj.hostname.endsWith("news.google.com")) {
init.headers["User-Agent"] = "curl/7.10.6 (i386-redhat-linux-gnu) libcurl/7.10.6 OpenSSL/0.9.7a ipv6 zlib/1.1.4";
}
url = urlObj.href;
// Fetch the URL with a timeout, discarding the outcome of the fetch Promise because the logic for
// resolving URLs is in the webRequest handlers (which have greater permissions for inspecting and
// modifying the HTTP request lifecycle)
fetch(url, init).then(() => {}, () => {});
const timeoutId = setTimeout(() => {
controller.abort();
completeResolution(linkResolutionId, false, undefined, "Error: webScience.linkResolution.resolveUrl fetch request timed out.");
}, fetchTimeout);
// Store the link resolution data, including promise resolve and reject functions
return new Promise((resolve, reject) => {
linkResolutionIdToData.set(linkResolutionId, {
resolve,
reject,
timeoutId,
            requestId: "",
            redirects: 0,
            parseAmpUrl: options.parseAmpUrl,
            parseFacebookLinkShim: options.parseFacebookLinkShim,
            removeFacebookLinkDecoration: options.removeFacebookLinkDecoration,
onlyRequestKnownUrlShorteners: options.request === "known_shorteners"
});
});
}
/**
* Complete resolution of a link via HTTP requests, under circumstances specified in the arguments.
* @param {string} linkResolutionId - The link resolution ID.
* @param {boolean} success - Whether link resolution was successful.
* @param {string} [resolvedUrl] - The URL that resulted from resolution.
* @param {string} [errorMessage] - An error message because resolution failed.
* @private
*/
function completeResolution(linkResolutionId, success, resolvedUrl, errorMessage) {
const linkResolutionData = linkResolutionIdToData.get(linkResolutionId);
const resolve = linkResolutionData.resolve;
const reject = linkResolutionData.reject;
clearTimeout(linkResolutionData.timeoutId);
if(success) {
if(linkResolutionData.parseAmpUrl) {
resolvedUrl = parseAmpUrl(resolvedUrl);
}
if(linkResolutionData.parseFacebookLinkShim) {
resolvedUrl = parseFacebookLinkShim(resolvedUrl);
}
if(linkResolutionData.removeFacebookLinkDecoration) {
resolvedUrl = removeFacebookLinkDecoration(resolvedUrl);
}
}
// Remove the data structure entries for this link resolution
if(linkResolutionData.requestId !== "") {
requestIdToLinkResolutionId.delete(linkResolutionData.requestId);
}
linkResolutionIdToData.delete(linkResolutionId);
if(success) {
resolve(resolvedUrl);
return;
}
reject(errorMessage);
}
/**
* A listener for the browser.webRequest.onBeforeSendHeaders event. This listener blocks
* the request, extracts the link resolution ID from a header, updates the link
* resolution data structures, and removes the header. This listener also cancels the
* request if it exceeds the redirect limit.
* @param {Object} details - Details about the request.
* @returns {browser.webRequest.BlockingResponse|undefined}
* @private
*/
function onBeforeSendHeadersListener(details) {
let linkResolutionId = undefined;
let resolutionData = undefined;
let requestHeaderIndex = -1;
for(let i = 0; i < details.requestHeaders.length; i++) {
const requestHeader = details.requestHeaders[i];
if(requestHeader.name === httpHeaderName) {
linkResolutionId = requestHeader.value;
requestHeaderIndex = i;
break;
}
}
// If the HTTP request header includes a link resolution ID, update the
// internal data structures to associate that link resolution ID with
// the webRequest requestId
if(linkResolutionId !== undefined) {
resolutionData = linkResolutionIdToData.get(linkResolutionId);
if(resolutionData !== undefined) {
resolutionData.requestId = details.requestId;
requestIdToLinkResolutionId.set(details.requestId, linkResolutionId);
}
}
// If the HTTP request header doesn't include a link resolution ID,
// we might already have a mapping from the webRequest requestId to
// the link resolution ID
else {
linkResolutionId = requestIdToLinkResolutionId.get(details.requestId);
if(linkResolutionId !== undefined) {
resolutionData = linkResolutionIdToData.get(linkResolutionId);
}
}
// If this link resolution should only issue HTTP requests to known
// URL shorteners, and this is not a request to a known URL shortener,
// consider the link resolved and cancel the request
if((resolutionData !== undefined) &&
resolutionData.onlyRequestKnownUrlShorteners &&
!urlShortenerMatchPatternSet.matches(details.url)) {
completeResolution(linkResolutionId, true, details.url, undefined);
return {
cancel: true
};
}
// Check the redirect limit and cancel the request if it's exceeded
if((resolutionData !== undefined) &&
resolutionData.redirects > fetchMaxRedirects) {
completeResolution(linkResolutionId, false, undefined, "Error: webScience.linkResolution.resolveUrl fetch request exceeded redirect limit.");
return {
cancel: true
};
}
// If there's an HTTP header with the link resolution ID, remove it
if(requestHeaderIndex >= 0) {
details.requestHeaders.splice(requestHeaderIndex, 1);
return {
requestHeaders: details.requestHeaders
};
}
}
/**
* Listener for webRequest.onBeforeRedirect.
* @param {Object} details - Details about the request.
* @private
*/
function onBeforeRedirectListener(details) {
const linkResolutionId = requestIdToLinkResolutionId.get(details.requestId);
if(linkResolutionId !== undefined) {
const linkResolutionData = linkResolutionIdToData.get(linkResolutionId);
linkResolutionData.redirects++;
}
}
/**
* Listener for webRequest.onCompleted.
* @param {Object} details - Details about the request.
* @private
*/
function onCompletedListener(details) {
const linkResolutionId = requestIdToLinkResolutionId.get(details.requestId);
if(linkResolutionId !== undefined) {
completeResolution(linkResolutionId, true, details.url, undefined);
}
}
/**
* Listener for webRequest.onErrorOccurred.
* @param {Object} details - Details of the error.
* @private
*/
function onErrorListener(details) {
const linkResolutionId = requestIdToLinkResolutionId.get(details.requestId);
if(linkResolutionId !== undefined) {
completeResolution(linkResolutionId, false, undefined, "Error: webScience.linkResolution.resolveUrl fetch request encountered a network error.");
}
}
/**
* Initialize the module, registering event listeners for `resolveUrl` and built-in content scripts for parsing
* and registering URL mappings (currently Twitter and Google News). Runs only once. This function is automatically
* called by `resolveUrl`, but you can call it separately if you want to use registered URL mappings without
* `resolveUrl`.
*/
export function initialize() {
if(initialized) {
return;
}
initialized = true;
permissions.check({
module: "webScience.linkResolution",
requiredPermissions: [ "webRequest", "webRequestBlocking" ],
suggestedOrigins: [ "<all_urls>" ]
});
// URL resolution via HTTP requests
// Set listeners for webRequest lifecycle events
// By setting the windowId filter to WINDOW_ID_NONE, we can
// ignore requests associated with ordinary web content
browser.webRequest.onBeforeSendHeaders.addListener(onBeforeSendHeadersListener,
{
urls: [ "<all_urls>" ],
windowId: browser.windows.WINDOW_ID_NONE
},
[ "requestHeaders", "blocking" ]);
browser.webRequest.onBeforeRedirect.addListener(onBeforeRedirectListener,
{
urls: [ "<all_urls>" ],
windowId: browser.windows.WINDOW_ID_NONE
});
browser.webRequest.onCompleted.addListener(onCompletedListener,
{
urls: [ "<all_urls>" ],
windowId: browser.windows.WINDOW_ID_NONE
});
browser.webRequest.onErrorOccurred.addListener(onErrorListener,
{
urls: [ "<all_urls>" ],
windowId: browser.windows.WINDOW_ID_NONE
});
// URL mapping parsers in content scripts
// Listen for the page visit stop event, because we should discard URL mappings for that page shortly afterward
pageManager.onPageVisitStop.addListener(pageVisitStopListener);
// Register the content script for parsing URL mappings on Twitter, if the extension has permission for
// Twitter URLs
browser.permissions.contains({ origins: [ "*://*.twitter.com/*" ]}).then(hasPermission => {
if(hasPermission) {
browser.contentScripts.register({
matches: [ "*://*.twitter.com/*" ],
js: [{
file: linkResolutionTwitterContentScript
}],
runAt: "document_idle"
});
}
});
// Register the content script for parsing URL mappings on Google News, if the extension has permission for
// Google News URLs
browser.permissions.contains({ origins: [ "*://*.news.google.com/*" ]}).then(hasPermission => {
if(hasPermission) {
browser.contentScripts.register({
matches: [ "*://*.news.google.com/*" ],
js: [{
file: linkResolutionGoogleNewsContentScript
}],
runAt: "document_idle"
});
}
});
// Register a message listener for URL mappings parsed by content scripts
messaging.onMessage.addListener(urlMappingsContentScriptMessageListener, {
type: "webScience.linkResolution.registerUrlMappings",
schema: {
pageId: "string",
urlMappings: "object"
}
});
}
/**
* @typedef {Object} UrlMapping
* @property {string} sourceUrl - The source URL for the mapping.
* @property {string} destinationUrl - The destination URL for the mapping.
* @property {boolean} ignoreSourceUrlParameters - Whether to ignore parameters when matching URLs against the source URL.
*/
/**
* @typedef {Object} RegisteredUrlMappings
* @property {Function} unregister - Unregister the URL mappings.
*/
/**
 * A map of registered URL mappings where keys are source URLs (without parameters if `ignoreSourceUrlParameters` is
* specified) and values are sets of UrlMapping objects with an additional `registrationId` property.
* @constant {Map<string, Set<UrlMapping>>}
* @private
*/
const registeredUrlMappings = new Map();
/**
* A map of page IDs to sets of registered URL mappings. The mappings are automatically unregistered shortly after
* a page visit ends.
* @constant {Map<string, Set<RegisteredUrlMappings>>}
* @private
*/
const pageIdsWithRegisteredUrlMappings = new Map();
/**
* Register known URL mappings for use in link resolution. This functionality allows studies to minimize HTTP requests
* for link resolution when a URL mapping can be parsed from page content.
* @param {UrlMapping[]} urlMappings - The URL mappings to register.
* @param {string} [pageId=null] - An optional page ID for the page that the URL mappings were parsed from. If a page
* ID is provided, the mappings will be automatically removed shortly after the page visit ends.
* @returns {RegisteredUrlMappings} An object that allows unregistering the URL mappings.
* @example
* // A content script parses URL mappings from a Twitter page, then in the background script:
* webScience.linkResolution.registerUrlMappings([
* {
* sourceUrl: "https://t.co/djogkKUD5y?amp=1",
* destinationUrl: "https://researchday.princeton.edu/",
* ignoreSourceUrlParameters: true
* },
* // Note that the following mapping involves a known URL shortener and would require further resolution
* {
* sourceUrl: "https://t.co/qQTRITLZKP?amp=1",
* destinationUrl: "https://mzl.la/3jh1VgZ",
* ignoreSourceUrlParameters: true
* }
* ]);
*/
export function registerUrlMappings(urlMappings, pageId = null) {
// Generate a unique ID for this registration and maintain a set of registered source URLs,
// so that we can disambiguate in the situation where there are multiple mappings registered
// for the same source URL
const registrationId = id.generateId();
const sourceUrls = new Set();
for(const urlMapping of urlMappings) {
let sourceUrl = urlMapping.sourceUrl;
// If the mapping specifies ignoring the source URL parameters, remove any parameters
if(urlMapping.ignoreSourceUrlParameters) {
const sourceUrlObj = new URL(urlMapping.sourceUrl);
sourceUrlObj.search = "";
sourceUrl = sourceUrlObj.href;
}
sourceUrls.add(sourceUrl);
let registeredUrlMappingsForSourceUrl = registeredUrlMappings.get(sourceUrl);
if(registeredUrlMappingsForSourceUrl === undefined) {
registeredUrlMappingsForSourceUrl = new Set();
registeredUrlMappings.set(sourceUrl, registeredUrlMappingsForSourceUrl);
}
registeredUrlMappingsForSourceUrl.add({
sourceUrl,
destinationUrl: urlMapping.destinationUrl,
ignoreSourceUrlParameters: urlMapping.ignoreSourceUrlParameters,
registrationId
});
}
const unregisterObj = {
// Unregister the registered URL mappings, removing both individual mappings from this
// registration and source URLs that no longer have any mappings
unregister: () => {
// Keep track of source URLs that will have no remaining mappings after removing
// these registered mappings
const sourceUrlsToRemove = new Set();
for(const sourceUrl of sourceUrls) {
const registeredUrlMappingsForSourceUrl = registeredUrlMappings.get(sourceUrl);
if(registeredUrlMappingsForSourceUrl === undefined) {
continue;
}
// Keep track of registered mappings for the source URL to remove
const registeredUrlMappingsToRemove = new Set();
for(const registeredUrlMapping of registeredUrlMappingsForSourceUrl) {
if(registeredUrlMapping.registrationId === registrationId) {
registeredUrlMappingsToRemove.add(registeredUrlMapping);
}
}
for(const registeredUrlMappingToRemove of registeredUrlMappingsToRemove) {
registeredUrlMappingsForSourceUrl.delete(registeredUrlMappingToRemove);
}
if(registeredUrlMappingsForSourceUrl.size === 0) {
sourceUrlsToRemove.add(sourceUrl);
}
}
for(const sourceUrlToRemove of sourceUrlsToRemove) {
registeredUrlMappings.delete(sourceUrlToRemove);
}
}
};
// If a page ID is specified, store the return object in a map so we can call unregister
// when the page visit ends
if(pageId !== null) {
let registeredUrlMappingsForPageId = pageIdsWithRegisteredUrlMappings.get(pageId);
if(registeredUrlMappingsForPageId === undefined) {
registeredUrlMappingsForPageId = new Set();
pageIdsWithRegisteredUrlMappings.set(pageId, registeredUrlMappingsForPageId);
}
registeredUrlMappingsForPageId.add(unregisterObj);
}
return unregisterObj;
}
/**
* Apply the URL mappings that have been registered with `registerUrlMappings`. This function
* first tries to apply a mapping with URL parameters and then tries to apply a mapping without
* URL parameters. If there is no mapping to apply, this function returns the provided URL.
* @param {string} url - The URL to apply registered URL mappings to.
* @returns {string} The provided URL with a URL mapping applied or, if there is no mapping to
* apply, the provided URL.
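 * @example
 * // Assuming the first mapping from the registerUrlMappings example above is registered:
 * // returns "https://researchday.princeton.edu/"
 * applyRegisteredUrlMappings("https://t.co/djogkKUD5y?amp=1");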
*/
export function applyRegisteredUrlMappings(url) {
// Try to apply a mapping with parameters
const registeredMappingsForUrl = registeredUrlMappings.get(url);
if(registeredMappingsForUrl !== undefined) {
for(const registeredMappingForUrl of registeredMappingsForUrl) {
if(url === registeredMappingForUrl.sourceUrl) {
return registeredMappingForUrl.destinationUrl;
}
}
}
// Try to apply a mapping without parameters
const urlObj = new URL(url);
urlObj.search = "";
const urlWithoutParameters = urlObj.href;
const registeredMappingsForUrlWithoutParameters = registeredUrlMappings.get(urlWithoutParameters);
if(registeredMappingsForUrlWithoutParameters !== undefined) {
for(const registeredMappingForUrlWithoutParameters of registeredMappingsForUrlWithoutParameters) {
if((urlWithoutParameters === registeredMappingForUrlWithoutParameters.sourceUrl) && registeredMappingForUrlWithoutParameters.ignoreSourceUrlParameters) {
return registeredMappingForUrlWithoutParameters.destinationUrl;
}
}
}
// If there was no mapping to apply, return the input URL
return url;
}
/**
* A listener for messages from the URL parsing content scripts that registers
* parsed URL mappings.
* @param {Object} message - The message from the content script.
* @param {UrlMapping[]} message.urlMappings - The URL mappings parsed by the content script.
* @param {string} message.pageId - The page ID for the page where the URL mappings were parsed.
* @private
*/
function urlMappingsContentScriptMessageListener({ urlMappings, pageId }) {
registerUrlMappings(urlMappings, pageId);
}
/**
* The delay, in milliseconds, to wait after a page visit stop event to remove any
* registered URL mappings associated with the page.
* @constant {number}
* @private
*/
const registeredUrlMappingPageVisitStopExpiration = 5000;
/**
* A listener for the pageManager.onPageVisitStop event that expires registered URL mappings.
* @param {pageManager.PageVisitStopDetails} details
* @private
*/
function pageVisitStopListener({ pageId }) {
const registeredUrlMappingsForPageId = pageIdsWithRegisteredUrlMappings.get(pageId);
if(registeredUrlMappingsForPageId !== undefined) {
setTimeout(() => {
for(const registeredUrlMappingForPageId of registeredUrlMappingsForPageId) {
registeredUrlMappingForPageId.unregister();
}
pageIdsWithRegisteredUrlMappings.delete(pageId);
}, registeredUrlMappingPageVisitStopExpiration);
}
}
<|start_filename|>src/workers.js<|end_filename|>
/**
* This module provides functionality for Web Workers to listen for
* background script events generated by WebScience. The design pattern
* is simple: for each event you want your worker script to observe, use
* `createEventListener` to generate a listener, then add the listener
* to the event. The function will automatically convert background
* script events to messages posted to your worker script.
*
* ## Design Considerations
* We use message passing for WebScience integration with Web Workers,
* because of Content Security Policy considerations. Specifically, if
* we were to replicate WebScience APIs inside of worker scripts, we
* would need to load inlined worker scripts from the background page.
* There is, however, no way to permit only specific inline worker
* scripts in Content Security Policy Level 3. We would have to allow
* all blob URLs to load as worker scripts, which may not be permissible
* for addons.mozilla.org review.
*
* ## Future Directions
* If we identify a path forward for loading worker scripts from blob
* URLs, we could enable a more streamlined design pattern for WebScience
* event listeners in worker scripts.
*
* @module workers
*/
/**
* An object that is posted as a message to a Web Worker when an
* event fires.
* @typedef {Object} WorkerEventData
* @property {string} eventName - The name of the event.
* @property {Array} listenerArguments - The arguments that would be
* passed to an event listener in the background script.
* @example
* // {
* // eventName: "webScience.scheduling.onIdleDaily",
* // listenerArguments: [ ]
* // }
* @example
* // {
* // eventName: "webScience.idle.onStateChanged",
* // listenerArguments: [ "idle" ]
* // }
* @example
* // {
* // eventName: "webScience.pageNavigation.onPageData",
* // listenerArguments: [{
* // pageId: "e60f1f92-f42b-4084-93a7-9e7145e5f716",
* // url: "https://www.example.com/",
* // ...
* // }]
* // }
*/
/**
* Create a listener for a WebScience event that will automatically
* send a message to a Worker when the event occurs.
* @param {Worker} worker - The Worker that should receive messages
* about the event.
* @param {string} [eventName] - The name of the event, which is used
* as the `eventName` property in messages to the Worker. This parameter
* is not necessary when adding the listener to a WebScience event or
 * an event created with `events.createEvent` (if an event name is
* specified), because `eventName` (if not provided as a parameter) will
* be automatically set to the event's name when the listener is added to
* the event.
* @returns {Function} A listener for the event. Each listener should
* only be used with one event.
* @example
* // An example of how to use `workers.createEventListener` for
* // natural language processing in a Web Worker.
*
* // background.js
* const worker = new Worker("worker.js");
* webScience.pageText.onTextParsed.addListener(
* webScience.workers.createEventListener(worker),
* {
* matchPatterns: [ "*://*.example.com/*" ]
* });
*
* // worker.js
* function onTextParsedListener(textParsedDetails) {
* // Natural language processing on webpage text
* }
* self.addEventListener("message", event => {
* if((typeof event.data === "object") &&
* ("eventName" in event.data) &&
* (event.data.eventName === "webScience.pageText.onTextParsed")) {
* onTextParsedListener.apply(null, event.data.listenerArguments);
* }
* });
*/
export function createEventListener(worker, eventName = null) {
const listener = function(...args) {
// If there was an event name parameter, use that as the event name for the message to the worker
// If there was no event name parameter but an event name annotation was set on the listener
// function, use that as the event name for the message to the worker
// Only send a message to the worker if we have an event name for the message
let messageEventName = null;
if(typeof eventName === "string") {
messageEventName = eventName;
}
else if(("webScienceEventName" in listener) && (typeof listener.webScienceEventName === "string")) {
messageEventName = listener.webScienceEventName;
}
if(typeof messageEventName === "string") {
worker.postMessage({
eventName: messageEventName,
listenerArguments: args
});
}
};
return listener;
}
<|start_filename|>src/permissions.js<|end_filename|>
/**
* This module facilitates checking that required permissions are
* provided in the WebExtensions manifest.
*
* @module permissions
*/
import * as matching from "./matching.js";
/**
* An object where keys are Content Security Policy directive names and values are arrays of directive values.
* @typedef {Object} ContentSecurityPolicy
* @example
* {
* "script-src": [ "'self'", "www.example.com" ],
* "object-src": [ 'self' ]
* }
*/
/**
* The Content Security Policy directives specified in the Content Security Policy Level 3 Working Draft.
* @constant {Set<string>}
* @private
*/
const contentSecurityPolicyDirectives = new Set([ "child-src", "connect-src", "default-src", "font-src", "frame-src", "img-src", "manifest-src", "media-src", "object-src", "prefetch-src", "script-src", "script-src-elem", "script-src-attr", "style-src", "style-src-elem", "style-src-attr", "worker-src" ]);
/**
* The Content Security Policy fallback directives specified in the Content Security Policy Level 3 Working Draft.
* Property names are directive names and property values are arrays of fallback directive names.
* @constant {Object}
* @private
*/
const contentSecurityPolicyDirectiveFallbacks = {
"script-src-elem": [ "script-src-elem", "script-src", "default-src" ],
"script-src-attr": [ "script-src-attr", "script-src", "default-src" ],
"style-src-elem": [ "style-src-elem", "style-src", "default-src" ],
"style-src-attr": [ "style-src-attr", "style-src", "default-src" ],
"worker-src": [ "worker-src", "child-src", "script-src", "default-src" ],
"connect-src": [ "connect-src", "default-src" ],
"manifest-src": [ "manifest-src", "default-src" ],
"prefetch-src": [ "prefetch-src", "default-src" ],
"object-src": [ "object-src", "default-src" ],
"frame-src": [ "frame-src", "child-src", "default-src" ],
"media-src": [ "media-src", "default-src" ],
"font-src": [ "font-src", "default-src" ],
"img-src": [ "img-src", "default-src" ]
}
/**
* Parses a Content Security Policy from a string. We do not validate the manifest Content Security Policy because
* the browser validates it.
* @param {string} contentSecurityPolicyString - The input Content Security Policy string.
* @returns {ContentSecurityPolicy} The parsed Content Security Policy.
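 * @example
 * // returns { "script-src": [ "'self'" ], "object-src": [ "'self'" ] }
 * parseContentSecurityPolicy("script-src 'self'; object-src 'self';");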
* @private
*/
function parseContentSecurityPolicy(contentSecurityPolicyString) {
const parsedContentSecurityPolicy = {};
const directiveNameAndValueStrings = contentSecurityPolicyString.split(/;(?: )*/);
for(const directiveNameAndValueString of directiveNameAndValueStrings) {
const directiveNameAndValueTokens = directiveNameAndValueString.split(/(?: )+/);
if(directiveNameAndValueTokens.length > 0) {
const directiveName = directiveNameAndValueTokens[0];
const directiveValues = directiveNameAndValueTokens.slice(1);
if(contentSecurityPolicyDirectives.has(directiveName)) {
parsedContentSecurityPolicy[directiveName] = directiveValues;
}
}
}
return parsedContentSecurityPolicy;
}
/**
* Check that a directive is provided in a Content Security Policy.
 * @param {string} directiveName - The name of the directive to check.
 * @param {string} directiveValue - The value of the directive to check.
 * @param {ContentSecurityPolicy} contentSecurityPolicy - The Content Security Policy to check the directive against.
 * @param {boolean} [checkFallbackDirectives=true] - Whether to check the fallback directives for the specified directive.
 * @returns {boolean} Whether the Content Security Policy provides the directive value, directly or via a fallback directive.
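 * @example
 * // returns true, because script-src-elem falls back to script-src
 * checkContentSecurityPolicyDirective("script-src-elem", "'self'", { "script-src": [ "'self'" ] });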
* @private
*/
function checkContentSecurityPolicyDirective(directiveName, directiveValue, contentSecurityPolicy, checkFallbackDirectives = true) {
if(directiveName in contentSecurityPolicy) {
if(contentSecurityPolicy[directiveName].includes(directiveValue)) {
return true;
}
return false;
}
if(checkFallbackDirectives && directiveName in contentSecurityPolicyDirectiveFallbacks) {
for(const fallbackDirectiveName of contentSecurityPolicyDirectiveFallbacks[directiveName]) {
if(fallbackDirectiveName in contentSecurityPolicy) {
if(contentSecurityPolicy[fallbackDirectiveName].includes(directiveValue)) {
return true;
}
return false;
}
}
}
return false;
}
/**
* Check that the WebExtensions manifest includes specified API and origin permissions.
* @param {Object} options
* @param {string[]} [options.requiredPermissions=[]] - WebExtensions API permissions that are required.
* @param {string[]} [options.suggestedPermissions=[]] - WebExtensions API permissions that are recommended.
* @param {string[]} [options.requiredOrigins=[]] - Origin permissions that are required.
* @param {string[]} [options.suggestedOrigins=[]] - Origin permissions that are recommended.
* @param {ContentSecurityPolicy} [options.requiredContentSecurityPolicy = {}] - Content Security Policy directives that are required.
* @param {ContentSecurityPolicy} [options.suggestedContentSecurityPolicy = {}] - Content Security Policy directives that are recommended.
 * @param {boolean} [options.warn=true] - Whether to output any missing required or suggested permissions with `console.warn()`.
* @param {string} [options.module="moduleNameNotProvided"] - The name of the module having its permissions checked, used in warning
* output.
* @returns {boolean} Whether the WebExtensions manifest includes the required WebExtensions API permissions, origin permissions, and
* Content Security Policy directives.
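 * @example
 * // A sketch of checking permissions for a hypothetical module.
 * const passed = await check({
 *     module: "webScience.exampleModule",
 *     requiredPermissions: [ "webRequest" ],
 *     suggestedOrigins: [ "<all_urls>" ],
 *     requiredContentSecurityPolicy: { "script-src": [ "'self'" ] }
 * });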
*/
export async function check({
requiredPermissions = [],
requiredOrigins = [],
suggestedPermissions = [],
suggestedOrigins = [],
requiredContentSecurityPolicy = {},
suggestedContentSecurityPolicy = {},
warn = true,
module = "moduleNameNotProvided"
}) {
// If this function is called in an environment other than a background script (e.g., a content script
// or a worker script), that could mean the call isn't in the right location (i.e., the check is running
// on a code path that doesn't depend on the permissions), or that could mean the call reflects incorrect
// use of background script code in a non-background environment. Since we cannot distinguish these
// situations, we output a warning to the console and return true.
if((typeof browser !== "object") || !("permissions" in browser)) {
console.warn(`Unable to check ${module} permissions in an environment without browser.permissions. This warning may indicate incorrect use of a background script function in a content script or worker script.`);
return true;
}
let passed = true;
// API permissions
if(requiredPermissions.length > 0) {
const requiredPermissionsCheck = await browser.permissions.contains({ permissions: requiredPermissions });
passed = passed && requiredPermissionsCheck;
if(!requiredPermissionsCheck && warn) {
console.warn(`${module} is missing required API permissions: ${JSON.stringify(requiredPermissions)}`);
}
}
if(suggestedPermissions.length > 0) {
const suggestedPermissionsCheck = await browser.permissions.contains({ permissions: suggestedPermissions });
if(!suggestedPermissionsCheck && warn) {
console.warn(`${module} is missing recommended API permissions: ${JSON.stringify(suggestedPermissions)}`);
}
}
// Origin permissions
if(requiredOrigins.length > 0) {
const requiredOriginsCheck = await browser.permissions.contains({ origins: requiredOrigins });
passed = passed && requiredOriginsCheck;
if(!requiredOriginsCheck && warn) {
console.warn(`${module} is missing required origin permissions: ${JSON.stringify(requiredOrigins)}`);
}
}
if(suggestedOrigins.length > 0) {
const suggestedOriginsCheck = await browser.permissions.contains({ origins: suggestedOrigins });
if(!suggestedOriginsCheck && warn) {
console.warn(`${module} is missing recommended origin permissions: ${JSON.stringify(suggestedOrigins)}`);
}
}
// Content Security Policy directives
// The default CSP for WebExtensions is "script-src 'self'; object-src 'self';"
// See https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/Content_Security_Policy
let manifestContentSecurityPolicyString = "script-src 'self'; object-src 'self';";
const manifest = browser.runtime.getManifest();
if(("content_security_policy" in manifest) && (manifest["content_security_policy"] !== null)) {
manifestContentSecurityPolicyString = manifest["content_security_policy"];
}
const manifestContentSecurityPolicy = parseContentSecurityPolicy(manifestContentSecurityPolicyString);
let passedRequiredContentSecurityPolicy = true;
for(const directiveName of Object.keys(requiredContentSecurityPolicy)) {
for(const directiveValue of requiredContentSecurityPolicy[directiveName]) {
passedRequiredContentSecurityPolicy = passedRequiredContentSecurityPolicy && checkContentSecurityPolicyDirective(directiveName, directiveValue, manifestContentSecurityPolicy);
}
}
passed = passed && passedRequiredContentSecurityPolicy;
if(!passedRequiredContentSecurityPolicy && warn) {
console.warn(`${module} is missing required Content Security Policy directives: ${JSON.stringify(requiredContentSecurityPolicy)}`);
}
let passedSuggestedContentSecurityPolicy = true;
for(const directiveName of Object.keys(suggestedContentSecurityPolicy)) {
for(const directiveValue of suggestedContentSecurityPolicy[directiveName]) {
passedSuggestedContentSecurityPolicy = passedSuggestedContentSecurityPolicy && checkContentSecurityPolicyDirective(directiveName, directiveValue, manifestContentSecurityPolicy);
}
}
passed = passed && passedSuggestedContentSecurityPolicy;
if(!passedSuggestedContentSecurityPolicy && warn) {
console.warn(`${module} is missing recommended Content Security Policy directives: ${JSON.stringify(suggestedContentSecurityPolicy)}`);
}
return passed;
}
/**
 * Retrieve the origin match patterns permitted by the extension manifest.
 * @returns {string[]} The origin match patterns among the manifest permissions.
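 * @example
 * // With hypothetical manifest permissions [ "storage", "*://*.example.com/*" ],
 * // returns [ "*://*.example.com/*" ]
 * getManifestOriginMatchPatterns();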
*/
export function getManifestOriginMatchPatterns() {
const manifest = browser.runtime.getManifest();
const matchPatterns = [ ];
if(("permissions" in manifest) && Array.isArray(manifest.permissions)) {
for(const permission of manifest.permissions) {
try {
matching.matchPatternsToRegExp([ permission ]);
matchPatterns.push(permission);
}
catch(error) {
continue;
}
}
}
return matchPatterns;
}
<|start_filename|>src/randomization.js<|end_filename|>
/**
* This module enables running measurements and interventions with randomization,
* such as A/B tests, multivariate tests, and randomized controlled trials.
*
* @module randomization
*/
import * as permissions from "./permissions.js";
/**
* A condition for a measurement or intervention that can be randomly selected.
* @typedef {Object} Condition
* @property {string} name - A name that uniquely identifies the condition within
* the set of conditions.
* @property {number} weight - The positive weight to give this condition when randomly
* selecting a condition from a set.
*/
/**
* @typedef {Object} ConditionSet
* @property {string} name - A name that uniquely identifies the set of conditions.
* @property {Condition[]} conditions - The conditions in the set.
*/
/**
* A map of condition set names to condition names. Maintaining a cache avoids
* storage race conditions. The cache is an Object rather than a Map so it can
* be easily stored in extension local storage.
* @type {Object|null}
* @private
*/
let conditionCache = null;
/**
* A unique key for storing selected conditions in extension local storage.
* @constant {string}
* @private
*/
const storageKey = "webScience.randomization.conditions";
/**
* Selects a condition from a set of conditions. If a condition has previously
* been selected from the set, that same condition will be returned. If not,
* a condition will be randomly selected according to the provided weights.
* @param {ConditionSet} conditionSet - The set of conditions.
* @returns {string} - The name of the selected condition in the condition set.
* @example
* // on first run, returns "red" with 0.5 probability and "blue" with 0.5 probability
* // on subsequent runs, returns the same value as before
* randomization.selectCondition({
* name: "color",
* conditions: [
* {
* name: "red",
* weight: 1
* },
* {
* name: "blue",
* weight: 1
* }
* ]
* });
*/
export async function selectCondition(conditionSet) {
permissions.check({
module: "webScience.linkExposure",
requiredPermissions: [ "storage" ],
suggestedPermissions: [ "unlimitedStorage" ]
});
// Initialize the cache of selected conditions
if(conditionCache === null) {
const retrievedConditions = await browser.storage.local.get(storageKey);
// Check the cache once more, to avoid a race condition
if(conditionCache === null) {
if(storageKey in retrievedConditions)
conditionCache = retrievedConditions[storageKey];
else
conditionCache = { };
}
}
// Try to load the selected condition from the cache
if(conditionSet.name in conditionCache)
return conditionCache[conditionSet.name];
// If there isn't a previously selected condition, select a condition,
// save it to the cache and extension local storage, and return it
let totalWeight = 0;
const conditionNames = new Set();
    if(!Array.isArray(conditionSet.conditions) || (conditionSet.conditions.length === 0))
        throw new Error("The condition set must include an array with at least one condition.");
    for(const condition of conditionSet.conditions) {
        if(condition.weight <= 0)
            throw new Error("Condition weights must be positive values.");
        totalWeight += condition.weight;
        if(conditionNames.has(condition.name))
            throw new Error("Conditions must have unique names.");
        conditionNames.add(condition.name);
    }
let randomValue = Math.random();
let selectedCondition = "";
for(const condition of conditionSet.conditions) {
randomValue -= (condition.weight / totalWeight);
if(randomValue <= 0) {
selectedCondition = condition.name;
break;
}
}
conditionCache[conditionSet.name] = selectedCondition;
// No need to wait for storage to complete
browser.storage.local.set({ [storageKey]: conditionCache });
    return selectedCondition;
}
<|start_filename|>src/content-scripts/linkResolution.twitter.content.js<|end_filename|>
/**
* Content script for the `linkResolution` module that parses links from Twitter pages.
* This parsing is fragile and, by design, degrades gracefully to resolving links with
* HTTP requests.
* @module linkResolution.twitter.content
*/
function pageManagerLoaded() {
const pageManager = window.webScience.pageManager;
/**
* How often, in milliseconds, to tick the timer for checking links when the page has attention.
* @constant {number}
*/
const timerInterval = 500;
/**
* The anchor elements that have already been checked on the page.
* @type {WeakSet<HTMLAnchorElement>}
*/
let checkedAnchorElements = new WeakSet();
/**
* The timeout ID for timer ticks when the page has attention.
* @type {number}
*/
let timeoutID = -1;
/**
* Whether the page is currently between page visit start and
* page visit stop events.
     * @type {boolean}
     */
let inPageVisit = false;
/**
* A listener for pageManager.onPageVisitStart that resets
* page variables and starts the timer ticking if the page
* has attention.
*/
function pageVisitStartListener() {
checkedAnchorElements = new WeakSet();
timeoutID = -1;
inPageVisit = true;
if(pageManager.pageHasAttention) {
timerTick();
}
}
pageManager.onPageVisitStart.addListener(pageVisitStartListener);
if(pageManager.pageVisitStarted) {
pageVisitStartListener();
}
/**
* A listener for pageManager.onPageVisitStop that
* clears the ticking timer.
*/
function pageVisitStopListener() {
clearTimeout(timeoutID);
}
pageManager.onPageVisitStop.addListener(pageVisitStopListener);
/**
* A listener for pageManager.onPageAttentionUpdate that
* clears the ticking timer if the page loses attention
* and starts the ticking timer if the page gains
* attention.
*/
function pageAttentionUpdateListener() {
// Ignore attention events when we aren't between page visit
// start and page visit stop events
if(!inPageVisit) {
return;
}
if(!pageManager.pageHasAttention) {
            clearTimeout(timeoutID);
}
else {
timerTick();
}
}
pageManager.onPageAttentionUpdate.addListener(pageAttentionUpdateListener);
/**
* When the timer ticks, check all the anchor elements in the document that haven't already been
* checked.
*/
function timerTick() {
const urlMappings = [ ];
// Iterate through all the anchor elements in the document with an href that starts with
// https://t.co/
const anchorElements = document.querySelectorAll(`a[href^="https://t.co/"]`);
for(const anchorElement of anchorElements) {
try {
// Ignore links that we've already checked
if(checkedAnchorElements.has(anchorElement)) {
continue;
}
checkedAnchorElements.add(anchorElement);
// If the inner text for the link parses as a valid URL, that's the destination
// URL for the mapping
const urlObj = new URL(anchorElement.innerText);
urlMappings.push({
sourceUrl: anchorElement.href,
destinationUrl: urlObj.href,
ignoreSourceUrlParameters: true
});
}
catch {
continue;
}
}
// Notify the background script
if(urlMappings.length > 0) {
browser.runtime.sendMessage({
type: "webScience.linkResolution.registerUrlMappings",
pageId: pageManager.pageId,
urlMappings
});
}
// If the page has attention, set another timer tick
if(pageManager.pageHasAttention) {
timeoutID = setTimeout(timerTick, timerInterval);
}
}
}
// Wait for pageManager load
if (("webScience" in window) && ("pageManager" in window.webScience)) {
pageManagerLoaded();
}
else {
if(!("pageManagerHasLoaded" in window))
window.pageManagerHasLoaded = [];
window.pageManagerHasLoaded.push(pageManagerLoaded);
}
<|start_filename|>tests/build/src/background.js<|end_filename|>
/* eslint-disable no-unused-vars */
import * as WebScience from "../../../src/webScience.js";
<|start_filename|>src/inline.js<|end_filename|>
/**
* This module provides utilities for working with inlined content.
*
* @module inline
*/
/**
* Extract the content from a data URL as a string, decoding it from Base64
* if necessary. Useful for working with content scripts that have been
* encoded with `@rollup/plugin-url`.
* @param {string} dataUrl - The data URL.
* @returns {string} - The content of the URL.
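 * @example
 * // returns "hello"
 * dataUrlToString("data:text/plain;base64,aGVsbG8=");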
*/
export function dataUrlToString(dataUrl) {
if(!dataUrl.startsWith("data:")) {
throw new Error("Incorrectly formatted data URL.");
}
const commaIndex = dataUrl.indexOf(",");
if(commaIndex < 0) {
throw new Error("Incorrectly formatted data URL.");
}
// Not currently checking that the MIME type is valid
const dataUrlMimeTypeAndEncoding = dataUrl.substring(0, commaIndex);
let content = dataUrl.substring(commaIndex + 1, dataUrl.length);
if(dataUrlMimeTypeAndEncoding.endsWith("base64")) {
content = atob(content);
}
return content;
}
/**
* Convert a data URL to a blob object URL. Useful for working with HTML
* documents that have been encoded with `@rollup/plugin-url`.
 * @param {string} dataUrl - The data URL.
* @returns {string} - A blob object URL.
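 * @example
 * // A sketch assuming htmlDataUrl is a data URL produced by `@rollup/plugin-url`.
 * const blobUrl = dataUrlToBlobUrl(htmlDataUrl);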
*/
export function dataUrlToBlobUrl(dataUrl) {
return URL.createObjectURL(new Blob([ dataUrlToString(dataUrl) ]));
}
<|start_filename|>src/browser-action-assets/userSurvey.popupNoPrompt.js<|end_filename|>
(async function() {
// Load the message from storage
const messageStorageKey = "webScience.userSurvey.popupNoPromptMessage";
const messageResults = await browser.storage.local.get(messageStorageKey);
if(messageResults[messageStorageKey]) {
const contentElement = document.getElementById("content");
contentElement.textContent = messageResults[messageStorageKey];
}
// Load icon URL from storage
const iconUrlStorageKey = "webScience.userSurvey.popupIconUrl";
const iconUrlResults = await browser.storage.local.get(iconUrlStorageKey);
if(iconUrlResults[iconUrlStorageKey]) {
const iconElement = document.getElementById("icon");
iconElement.src = iconUrlResults[iconUrlStorageKey];
iconElement.style.display = "block";
}
})();
<|start_filename|>src/pageNavigation.js<|end_filename|>
/**
* This module enables measuring user engagement with webpages. See the `onPageData`
* event for specifics.
*
* @module pageNavigation
*/
import * as events from "./events.js";
import * as messaging from "./messaging.js";
import * as pageManager from "./pageManager.js";
import * as matching from "./matching.js";
import pageNavigationContentScript from "include:./content-scripts/pageNavigation.content.js";
/**
* A listener for the `onPageData` event.
* @callback pageDataListener
* @memberof module:pageNavigation.onPageData
* @param {Object} details - Additional information about the page data event.
* @param {string} details.pageId - The ID for the page, unique across browsing sessions.
* @param {string} details.url - The URL of the page, without any hash.
* @param {string} details.referrer - The referrer URL for the page, or `""` if there is no referrer.
* @param {number} details.pageVisitStartTime - The time when the page visit started, in ms since
* the epoch.
* @param {number} details.pageVisitStopTime - The time when the page visit ended, in ms since the
* epoch.
* @param {number} details.attentionDuration - The amount of time in ms that the page had user attention.
* @param {number} details.audioDuration - The amount of time in ms that the page was playing audio.
* @param {number} details.attentionAndAudioDuration - The amount of time in ms that the page both had
* user attention and was playing audio.
* @param {number} details.maxRelativeScrollDepth - The maximum relative scroll depth on the page.
* @param {boolean} details.privateWindow - Whether the page loaded in a private window.
*/
/**
* @typedef {Object} PageDataListenerRecord
* @property {matching.MatchPatternSet} matchPatternSet - The match patterns for the listener.
* @property {boolean} privateWindows - Whether to notify the listener about pages in private windows.
* @property {browser.contentScripts.RegisteredContentScript} contentScript - The content
* script associated with the listener.
* @private
*/
/**
* A map where each key is a listener and each value is a record for that listener.
* @constant {Map<pageDataListener, PageDataListenerRecord>}
* @private
*/
const pageDataListeners = new Map();
/**
* Add a listener for the `onPageData` event.
* @function addListener
* @memberof module:pageNavigation.onPageData
* @param {pageDataListener} listener - The listener to add.
* @param {Object} options - Options for the listener.
* @param {string[]} options.matchPatterns - The webpages that the listener should be notified about, specified with WebExtensions match patterns.
* @param {boolean} [options.privateWindows=false] - Whether to measure pages in private windows.
*/
/**
* Remove a listener for the `onPageData` event.
* @function removeListener
* @memberof module:pageNavigation.onPageData
* @param {pageDataListener} listener - The listener to remove.
*/
/**
* Whether a specified listener has been added for the `onPageData` event.
* @function hasListener
* @memberof module:pageNavigation.onPageData
* @param {pageDataListener} listener - The listener to check.
* @returns {boolean} Whether the listener has been added for the event.
*/
/**
* Whether the `onPageData` event has any listeners.
* @function hasAnyListeners
* @memberof module:pageNavigation.onPageData
* @returns {boolean} Whether the event has any listeners.
*/
/**
* An event that fires when a page visit has ended and data about the
* visit is available.
* @namespace
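 * @example
 * // A usage sketch with a hypothetical match pattern.
 * webScience.pageNavigation.onPageData.addListener(pageData => {
 *     console.log(`Page visit ended: ${pageData.url}`);
 * }, {
 *     matchPatterns: [ "*://*.example.com/*" ]
 * });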
*/
export const onPageData = events.createEvent({
name: "webScience.pageNavigation.onPageData",
addListenerCallback: addListener,
removeListenerCallback: removeListener,
notifyListenersCallback: () => { return false; }
});
/**
* Whether the module has completed initialization.
* @type {boolean}
* @private
*/
let initialized = false;
/**
* A callback function for adding a page data listener.
* @param {pageDataCallback} listener - The listener being added.
* @param {Object} options - Options for the listener.
* @param {string[]} options.matchPatterns - The match patterns for pages where the listener should
* be notified.
* @param {boolean} [options.privateWindows=false] - Whether the listener should be notified for
* pages in private windows.
* @private
*/
async function addListener(listener, {
matchPatterns,
privateWindows = false
}) {
// Initialization
if(!initialized) {
initialized = true;
await pageManager.initialize();
messaging.onMessage.addListener(pageData => {
// Remove the type string from the content script message
delete pageData.type;
// Notify listeners when the private window and match pattern requirements are met
for(const [listener, listenerRecord] of pageDataListeners) {
if((!pageData.privateWindow || listenerRecord.privateWindows)
&& (listenerRecord.matchPatternSet.matches(pageData.url))) {
listener(pageData);
}
}
},
{
type: "webScience.pageNavigation.pageData",
schema: {
pageId: "string",
url: "string",
referrer: "string",
pageVisitStartTime: "number",
pageVisitStopTime: "number",
attentionDuration: "number",
audioDuration: "number",
attentionAndAudioDuration: "number",
maxRelativeScrollDepth: "number",
privateWindow: "boolean"
}
});
}
// Compile the match patterns for the listener
const matchPatternSet = matching.createMatchPatternSet(matchPatterns);
// Register a content script for the listener
const contentScript = await browser.contentScripts.register({
matches: matchPatterns,
js: [{
file: pageNavigationContentScript
}],
runAt: "document_start"
});
// Store a record for the listener
pageDataListeners.set(listener, {
matchPatternSet,
contentScript,
privateWindows
});
}
/**
* A callback function for removing a page data listener.
* @param {pageDataCallback} listener - The listener that is being removed.
* @private
*/
function removeListener(listener) {
// If there is a record of the listener, unregister its content script
// and delete the record
const listenerRecord = pageDataListeners.get(listener);
if(listenerRecord === undefined) {
return;
}
listenerRecord.contentScript.unregister();
pageDataListeners.delete(listener);
}
<|start_filename|>src/timing.js<|end_filename|>
/**
* This module facilitates timestamping events, using a standardized clock.
* When supported by the browser, WebScience uses the shared monotonic clock
* specified by the W3C High Resolution Time recommendation. Otherwise,
* WebScience uses the system clock.
*
* ## Web Browser Clocks
* There are two clocks supported in modern web browsers.
* * __System Clock__ (`Date.now()` or `new Date`). The system clock is
* the ordinary time provided by the operating system. Using the
* system clock to timestamp events poses a risk: the user or operating
* system can adjust the clock at any time, for any reason, without any
* notice, to any value. The user might manually adjust the clock, for
* example, or the operating system might synchronize the clock to account
* for clock skew (e.g., NTP time sync). These adjustments can be large and
* non-monotonic, breaking assumptions that WebScience makes about timestamp
* proximity and ordering. A clock change during study execution could
* introduce subtle bugs or other unexpected behavior.
* * __Shared Monotonic Clock__
* (`performance.timeOrigin + performance.now()`). The W3C High Resolution
* Time recommendation specifies a shared monotonic clock. This clock
* should have the following properties:
* * strictly monotonic;
* * not subject to large or non-monotonic adjustments from any source;
* * consistent across cores, processes, threads, and globals down to the
* hardware level; and
* * synchronized to the system clock just once, on browser startup.
*
* Our goal is to migrate WebScience and Rally studies to the shared monotonic
* clock, because it does not have clock change risks like the system clock.
* Unfortunately, browser implementations of High Resolution Time currently
* depart from the W3C recommendation in significant ways that prevent reliance
* on the shared monotonic clock. We will update this module as browsers correct
* their implementations.
*
* ## Additional Notes
* * The High Resolution Time spec describes a shared monotonic clock (which
* must be used to generate `performance.timeOrigin` for each global) and
* per-global monotonic clocks (which tick for `performance.now()` and other
* uses of `DOMHighResTimeStamp`). Monotonic clocks on modern hardware are
* synchronized across cores, processes, and threads, so we treat
* `performance.timeOrigin + performance.now()` as the current time on the
* shared monotonic clock, even though the W3C spec doesn't _quite_ say that.
* * Firefox and Chrome currently depart from the High Resolution Time
* spec in significant ways: `performance.timeOrigin` is sometimes set from
* the system clock rather than the shared monotonic clock, and
* `performance.now()` (and other uses of `DOMHighResTimeStamp`) do not
* tick during system sleep on certain platforms.
*
* @see {@link https://www.w3.org/TR/hr-time-2/}
* @see {@link https://github.com/mdn/content/issues/4713}
* @see {@link https://github.com/w3c/hr-time/issues/65}
* @module timing
*/
/**
* Get whether the browser supports the High Resolution Time shared
* monotonic clock. Currently always returns `false`. We will update
* this function as browser support improves.
* @returns {boolean} Whether the browser supports the shared monotonic
* clock.
* @private
*/
function sharedMonotonicClockSupport() {
return false;
}
/**
* Get the current time, in milliseconds since the epoch, using a
* standardized clock.
* @returns {number} The current time, in milliseconds since the epoch.
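 * @example
 * const timeStamp = webScience.timing.now();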
*/
export function now() {
if(sharedMonotonicClockSupport()) {
return window.performance.timeOrigin + window.performance.now();
}
return Date.now();
}
/**
* Convert a timestamp on the system clock to a timestamp on the
* standardized clock. Use this function only where strictly necessary,
* and where it can be used immediately after the timestamp on the
* system clock. There is a risk that the system clock will have
* changed between the timestamp and now.
* @param {number} timeStamp - A timestamp, in milliseconds since the
* epoch, on the system clock.
* @returns {number} A timestamp, in milliseconds since the epoch, on
* the standardized clock.
* @example
* const systemTimeStamp = Date.now();
* const standardizedTimeStamp = webScience.timing.fromSystemClock(systemTimeStamp);
*/
export function fromSystemClock(timeStamp) {
if(sharedMonotonicClockSupport()) {
return timeStamp - Date.now() + window.performance.timeOrigin + window.performance.now();
}
return timeStamp;
}
/**
* Convert a timestamp on the shared monotonic clock to a timestamp
* on the standardized clock. Use this function only where strictly
* necessary, and where it can be used immediately after the timestamp
* on the monotonic clock. There is a risk that the system clock will
* have changed between the timestamp and now or that the monotonic
* clock was affected by an implementation bug.
* @param {number} timeStamp - A timestamp, in milliseconds since the
* epoch, on the shared monotonic clock.
* @param {boolean} relativeToTimeOrigin - Whether the timestamp
* is relative to a time origin (e.g., a DOM event or Performance API
* timestamp), or the time origin has already been added to the
* timestamp (e.g., `performance.timeOrigin` or
* `performance.timeOrigin + performance.now()`).
* @returns {number} A timestamp, in milliseconds since the epoch, on
* the standardized clock.
* @example
* const monotonicTimeStamp = performance.timeOrigin;
* const standardizedTimeStamp = webScience.timing.fromMonotonicClock(monotonicTimeStamp, false);
* @example
* const monotonicTimeStamp = performance.timeOrigin + performance.now();
* const standardizedTimeStamp = webScience.timing.fromMonotonicClock(monotonicTimeStamp, false);
* @example
* const relativeMonotonicTimeStamp = performance.now();
* const standardizedTimeStamp = webScience.timing.fromMonotonicClock(relativeMonotonicTimeStamp, true);
*/
export function fromMonotonicClock(timeStamp, relativeToTimeOrigin) {
if(sharedMonotonicClockSupport()) {
if(relativeToTimeOrigin) {
return window.performance.timeOrigin + timeStamp;
}
return timeStamp;
}
if(relativeToTimeOrigin) {
return timeStamp - window.performance.now() + Date.now();
}
return timeStamp - window.performance.now() - window.performance.timeOrigin + Date.now();
}
<|start_filename|>src/content-scripts/socialMediaActivity.facebook.content.js<|end_filename|>
/**
* Content script for getting Facebook post contents
* @module socialMediaActivity.facebook.content
*/
import { facebookLinkShimRegExp, parseFacebookLinkShim, removeFacebookLinkDecoration } from "../linkResolution.js";
// async IIFE wrapper to enable await syntax
(async function() {
// stop running if this is an incognito window and we're not supposed to run there
const privateWindowResults = await browser.storage.local.get("webScience.socialMediaLinkSharing.privateWindows");
if (("webScience.socialMediaLinkSharing.privateWindows" in privateWindowResults)
&& !privateWindowResults["webScience.socialMediaLinkSharing.privateWindows"]
&& browser.extension.inIncognitoContext) { return; }
// Let the background page know that the script is loaded and which tab it's in
browser.runtime.sendMessage({type: "webScience.socialMediaActivity",
platform: "facebook"});
const trackedReshares = [];
let mostRecentReshare = null;
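/**
 * Click handler for reshare buttons. Records the clicked element and attempts
 * to determine whether the post being reshared was authored by a person or a
 * page, by fetching the author's profile link and checking its type.
 * @param {Event} clicked - The click event from the reshare button.
 */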
function logReshareClick(clicked) {
const node = clicked.srcElement;
mostRecentReshare = node;
let profile = null;
let type = null;
const posts = document.querySelectorAll('div[role="article"]');
for (const post of posts) {
if (post.contains(mostRecentReshare)) {
const internal = /https:\/\/www\.facebook\.com\//;
const links = post.querySelectorAll("a[href]");
for (const link of links) {
if (internal.test(link.getAttribute("href"))) {
profile = link;
break;
}
}
// If no internal profile link was found, skip this post
if (profile === null) { continue; }
fetch(profile.getAttribute("href"), {"credentials":"omit"}).then((rFF) => {
rFF.text().then((text) => {
const u0 = /u0040type":"([a-zA-Z0-9]*)"/;
const uType = u0.exec(text);
if (uType == null || (uType.length > 1 && uType[1] == "Person")) {
type = "person";
} else {
type = "page";
}
mostRecentReshare = type;
});
});
}
}
}
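/**
 * Find reshare buttons on the page and attach click listeners to any that
 * aren't already tracked. Runs on an interval, since Facebook adds posts
 * to the page dynamically.
 */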
function reshareSourceTracking() {
const reshareButtons = document.querySelectorAll("div[aria-label*='Send this to friends']");
for (const reshareButton of reshareButtons) {
if (!(trackedReshares.includes(reshareButton))) {
trackedReshares.push(reshareButton);
reshareButton.addEventListener("click", logReshareClick);
}
}
}
setInterval(() => reshareSourceTracking(), 3000);
/**
 * Find links and text inside a node that's part of a Facebook post.
 * @param {Node} node - The node to search inside.
 * @param {Object} response - An object to add found links and text to.
*/
function searchFacebookPost(node, response) {
response.attachedUrls = [];
response.content = [];
// This is the class name used for the display boxes for news articles
// When a post contains one link and it's at the end of the post, the url
// isn't included in the post text, so we have to find it here instead.
const mediaBoxes = node.querySelectorAll("a[class=_52c6]")
for (const mediaBox of mediaBoxes) {
const rawUrl = mediaBox.getAttribute("href");
const parsedUrl = removeShim(rawUrl).url;
response.attachedUrls.push(parsedUrl);
}
const postBodies = node.querySelectorAll("div[data-testid=post_message]");
for (const postBody of postBodies) {
for (const elem of postBody.childNodes[0].childNodes) {
if (elem.nodeName == "A") {
response.content.push(removeShim(elem.href).url);
}
if(elem.nodeName == "#text") {
response.content.push(elem.data);
}
}
}
}
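/**
 * Parse a Facebook post URL into its components and build a normalized post URL.
 * Handles old- and new-style group permalinks, permalink.php URLs with user IDs,
 * and username-based post URLs.
 * @param {string} url - The post URL to parse.
 * @param {Object} request - The request, containing the post ID.
 * @returns {Object} The normalized URL, group name, username, and user ID.
 */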
function parseFacebookUrl(url, request) {
const oldGroupRegex = /facebook\.com\/groups\/([^/]*)\/permalink\/([0-9]*)/;
const newGroupRegex = /facebook\.com\/groups\/([^/]*)\/\?post_id=([0-9]*)/;
const userIdRegex = /facebook\.com\/permalink\.php\?story_fbid=([0-9]*)&id=([0-9]*)/;
const usernameRegex = /facebook\.com\/([^/]*)\/posts\/([0-9]*)/;
let username = ""; let groupName = ""; let newUrl = ""; let userId = "";
const oldGroupResult = oldGroupRegex.exec(url);
if (oldGroupResult) {
groupName = oldGroupResult[1];
newUrl = `facebook.com/groups/${groupName}/permalink/${request.postId}`;
}
const newGroupResult = newGroupRegex.exec(url);
if (newGroupResult) {
groupName = newGroupResult[1];
newUrl = `facebook.com/groups/${groupName}/permalink/${request.postId}`;
}
const idResult = userIdRegex.exec(url);
if (idResult) {
userId = idResult[2];
newUrl = idResult[0];
}
const nameResult = usernameRegex.exec(url);
if (nameResult) {
username = nameResult[1];
newUrl = nameResult[0];
}
return({newUrl: newUrl, groupName: groupName, username: username, userId: userId});
}
/**
 * Send a fetch request for the post we're looking for, and parse the resolved post URL from the result.
 * @param {Object} request - The request, containing the post's ID and the ID of the person who shared it.
 * @returns {Promise<Object>} The parsed post URL details (normalized URL, group name, username, and user ID).
*/
async function getFullUrl(request) {
return new Promise((resolve, reject) => {
const reqString = `https://www.facebook.com/${request.postId}`;
fetch(reqString, {credentials: 'include'}).then((responseFromFetch) => {
const redir = responseFromFetch.url;
resolve(parseFacebookUrl(redir, request));
});
});
}
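/**
 * Recursively extract the text content and outbound (target="_blank") links from
 * a node inside a Facebook post, as a nested structure of {text, links} objects
 * that mirrors the DOM.
 * @param {Node} node - The node to process.
 * @returns {Object|Object[]|null} The extracted structure, or null if the node has none.
 */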
function recStructure(node) {
let links = [];
let ret;
if (node.textContent == "") {
links = node.querySelectorAll ? node.querySelectorAll(`a[target='_blank']`) : [];
links = Array.prototype.map.call(links, link => link.href ? removeShim(link.href).url : null);
if (node.target && node.href && node.target == "_blank") {
links.push(removeShim(node.href).url);
}
if (links.length == 0) return null;
ret = {"text": null, "links": links};
return ret;
}
if (node.childNodes.length == 0) {
links = node.querySelectorAll ? node.querySelectorAll(`a[target='_blank']`) : [];
links = Array.prototype.map.call(links, link => link.href ? removeShim(link.href).url : null);
if (node.target && node.href && node.target == "_blank") {
links.push(removeShim(node.href).url);
}
ret = {"text": node.textContent, "links": links};
return ret;
}
const children = [];
for (const child of node.childNodes) {
const childContent = recStructure(child);
if (childContent != null) children.push(childContent);
}
if (children.length == 0) {
console.log("ERROR", node, children, node.textContent, links);
}
ret = children.length == 0 ? null : (children.length == 1 ? children[0] : children);
return ret;
}
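/**
 * Heuristically determine whether an extracted structure is the comments section
 * of a post, by looking for adjacent "Like" and "Comment" text nodes.
 * @param {Object|Object[]} structure - A structure produced by recStructure.
 * @returns {boolean} Whether the structure appears to be a comments section.
 */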
function isComments(structure) {
if (structure == null) return false;
if ("text" in structure) return false;
if (structure.length >= 2) {
if ("text" in structure[0] && structure[0].text == "Like" &&
"text" in structure[1] && structure[1].text == "Comment") {
return true;
}
}
for (const child of structure) {
if (isComments(child)) {
return true;
}
}
return false;
}
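/**
 * Remove the first child of an extracted structure that appears to be a comments
 * section, mutating the structure in place.
 * @param {Object[]} structure - A structure produced by recStructure.
 * @returns {Object[]} The structure, with the comments section removed.
 */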
function removeComments(structure) {
let index = 0;
for (const child of structure) {
const childIsComments = isComments(child);
if (childIsComments) {
structure.splice(index, 1);
return structure;
}
index += 1;
}
return structure;
}
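/**
 * Flatten an extracted structure into arrays of post text and links.
 * @param {Object|Object[]} structure - A structure produced by recStructure.
 * @param {string[]} text - An array to which found text is appended.
 * @param {string[]} links - An array to which found links are appended.
 */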
function condenseContent(structure, text, links) {
if (structure == null) {
console.log("ERROR", structure, text, links);
return;
}
if ("text" in structure && "links" in structure) {
if (structure.text != null) text.push(structure.text);
for (const link of structure.links) {
links.push(link);
}
return;
}
for (const child of structure) {
condenseContent(child, text, links);
}
}
/**
 * Removes the url shim. Currently supports only facebook urls.
 * @param {string} url - The URL to check.
 * @returns {Object} An object with a url property, whose value is the deshimmed url if the input
 * matches the facebook shim format and the input url otherwise, and a boolean isShim property that is true if the format matched.
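 * @example
 * // Sketch: a non-shim URL passes through unchanged.
 * // removeShim("https://example.com/") returns { url: "https://example.com/", isShim: false }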
*/
function removeShim(url) {
// check if the url matches shim
if (facebookLinkShimRegExp.test(url)) {
return {
url: removeFacebookLinkDecoration(parseFacebookLinkShim(url)),
isShim: true
};
}
return {
url: url,
isShim: false
};
}
browser.runtime.onMessage.addListener(async (request) => {
return new Promise((resolve, reject) => {
if ("recentReshare" in request) {
resolve(mostRecentReshare);
return;
}
const response = {};
response.content = [];
response.attachedUrls = [];
// Try to find the post on the page (should be in view)
const requestedPost = document.body.querySelector(`a[href*="${request.postId}"]`);
getFullUrl(request).then((detailsObj) => {
const newUrl = detailsObj.newUrl;
const username = detailsObj.username;
const groupName = detailsObj.groupName;
response.username = username;
response.groupName = groupName;
let node = requestedPost;
// New FB
try {
const posts = document.querySelectorAll('div[role="article"]');
let wantedPost;
for (const post of posts) {
if (post.hasAttribute("aria-label")) continue;
if (post.querySelector(`a[href*="${newUrl}"]`)) {
wantedPost = post;
break;
}
}
const recStructureWanted = recStructure(wantedPost);
const textRet = [];
const linksRet = [];
removeComments(recStructureWanted);
condenseContent(recStructureWanted, textRet, linksRet);
response.content = textRet;
response.attachedUrls = linksRet;
resolve(response);
return;
} catch (error) {
while (node.parentElement != null) {
node = node.parentElement;
if (node.hasAttribute("class") &&
node.getAttribute("class").includes("userContentWrapper")) {
searchFacebookPost(node, response);
}
// when the user is sharing something from an existing reshare post,
// the media box isn't inside the userContentWrapper (it's at the top of
// the reshare post, above the share buttons).
// To find it, we look for this clearfix class which encloses the media box.
if (node.hasAttribute("class") &&
node.getAttribute("class").includes("_5pcr clearfix")) {
searchFacebookPost(node, response);
}
}
resolve(response);
}
});
});
});
})();
<|start_filename|>src/content-scripts/linkExposure.content.js<|end_filename|>
/**
* Content script for the linkExposure module.
* @module linkExposure.content
*/
import { urlToPS1 } from "../linkResolution.js";
import * as timing from "../timing.js";
import { urlShortenerWithContentMatchPatterns } from "../data/urlShortenersWithContent.js";
import { createMatchPatternSet } from "../matching.js";
// async IIFE wrapper to enable await syntax and early returns
(async function () {
// If the linkExposure content script is already running on this page, no need for this instance
if("webScience" in window) {
if("linkExposureActive" in window.webScience) {
return;
}
window.webScience.linkExposureActive = true;
}
else {
window.webScience = {
linkExposureActive: true
};
}
let pageManager = null;
/**
* How often (in milliseconds) to check the page for new links.
* @constant {number}
*/
const updateInterval = 3000;
/**
* How long (in milliseconds) after losing attention to stop checking the links on the page.
* The content script will resume checking links after regaining attention.
* @constant {number}
*/
const attentionIdlePeriod = 5000;
/**
* Ignore links where the link URL PS+1 is identical to the page URL PS+1.
* Note that there is another ignoreSelfLinks constant in the linkExposure
* background script, and these two constants should have the same value.
* @constant {boolean}
*/
const ignoreSelfLinks = true;
/**
 * A match pattern set of URL shorteners with content. We exempt these URL
* shorteners from immediately being considered self-links, since they
* might resolve to a URL that isn't a self-link.
*/
const urlShortenerWithContentMatchPatternSet = createMatchPatternSet(urlShortenerWithContentMatchPatterns);
/**
* The minimum duration (in milliseconds) that a link must be visible to treat it as an exposure.
* @constant {number}
*/
const linkVisibilityDuration = 3000;
/**
* The minimum width (in pixels from `Element.getBoundingClientRect()`) that a link must have to treat it as an exposure.
* @constant {number}
*/
const linkMinimumWidth = 25;
/**
* The minimum height (in pixels from `Element.getBoundingClientRect()`) that a link must have to treat it as an exposure.
* @constant {number}
*/
const linkMinimumHeight = 15;
/**
* The minimum visibility (as a proportion of element size from `IntersectionObserverEntry.intersectionRatio`) that a link must have to treat it as an exposure.
* @constant {number}
*/
const linkMinimumVisibility = 0.7;
/**
* Check if an Element is visible. Visibility is defined as a `display` computed style other than `none` and an `opacity` computed style other than 0.
* @param {Element} element - The element to check.
* @returns {boolean} Whether the element is visible, or `false` if the parameter `element` is not an `Element`.
*/
function isElementVisible(element) {
if(!(element instanceof Element))
return false;
const style = window.getComputedStyle(element);
const display = style.getPropertyValue("display");
if((display === "") || (display === "none"))
return false;
const opacity = style.getPropertyValue("opacity");
if((opacity === "") || (opacity === "0"))
return false;
return true;
}
/**
* Converts a link URL, which may be relative, to an absolute URL.
* @param {string} url - The input URL, which may be relative.
* @returns {string} If the `url` is relative, an absolute version of `url`. Otherwise just `url`.
*/
function linkUrlToAbsoluteUrl(url) {
// Note that if the url is already absolute, the URL constructor will ignore the specified base URL
return (new URL(url, pageManager.url)).href;
}
/**
* The ID for a timer to periodically check links.
* @type {number}
*/
let timerId = 0;
/**
* The time when the page last lost the user's attention, or -1 if the page has never had the user's attention.
* @type {number}
*/
let lastLostAttention = -1;
/**
* Additional information about an anchor element.
* @typedef {Object} LinkInfo
* @property {boolean} observing - Whether this is a link that we are currently observing.
* @property {string} [url] - The URL for the link.
* @property {number} [totalTimeExposed] - How long (in milliseconds) that the link has been in view.
* @property {boolean} [inViewport] - Whether the link is in the browser viewport.
* @property {number} [lastExposureStartTime] - When the last exposure to the link began.
*/
/**
* A WeakMap where keys are anchor elements that we have checked and values are additional information about those elements.
* @type {WeakMap<HTMLAnchorElement, LinkInfo>}
*/
let anchorElements = new WeakMap();
// The URLs of exposed links to include in the update to the background script
let exposedLinkURLs = [];
/**
* The public suffix + 1 for the page URL.
* @type {string}
*/
let pagePS1 = "";
/**
* Update the time that the user has been exposed to a link. If the link
* exposure is longer than the threshold, queue the link for reporting to the
* background script and stop observing it.
* @param {number} timeStamp - The time when the underlying event fired.
* @param {HTMLAnchorElement} anchorElement - The anchor element.
* @param {LinkInfo} linkInfo - Information about the link.
*/
function updateLinkExposure(timeStamp, anchorElement, linkInfo) {
// If we aren't observing the link, there's nothing to update
if(!linkInfo.observing) {
return;
}
// If the user is currently exposed to the link (i.e., the page has attention, the link is
// in the viewport, and the link is visible), accumulate how long the link exposure lasted
// and move up the link exposure start time
if(pageManager.pageHasAttention && linkInfo.inViewport && isElementVisible(anchorElement)) {
if(linkInfo.lastExposureStartTime > 0) {
linkInfo.totalTimeExposed += timeStamp - linkInfo.lastExposureStartTime;
}
linkInfo.lastExposureStartTime = timeStamp;
}
// If the user is not exposed to the link, drop the link exposure start time
else {
linkInfo.lastExposureStartTime = -1;
}
// If the user has been exposed to the link longer than the visibility threshold, queue the
// link URL for sending to the background script and stop observing the link
if(linkInfo.totalTimeExposed >= linkVisibilityDuration) {
exposedLinkURLs.push(linkInfo.url);
anchorElements.set(anchorElement, { observing: false });
observer.unobserve(anchorElement);
}
}
/**
* Iterates the anchor elements in the DOM, calling the callback function with
* each anchor element.
* @param {Function} callback
*/
function forEachAnchorElement(callback) {
document.body.querySelectorAll("a[href]").forEach(anchorElement => {
callback(anchorElement);
});
}
/**
* A timer callback function that checks links (anchor elements) in the DOM.
*/
function timerTick() {
const timeStamp = timing.now();
// Iterate all the links currently on the page (i.e., anchor elements with an href attribute)
forEachAnchorElement(anchorElement => {
const linkInfo = anchorElements.get(anchorElement);
// If we haven't seen this link before, check the URL
if (linkInfo === undefined) {
const url = linkUrlToAbsoluteUrl(anchorElement.href);
// Check if the link URL PS+1 matches the page PS+1.
// If there's a match and we're ignoring self links,
// don't observe the link.
// We exempt URL shorteners with content from this
// check, since the resolved URL might not be a self-link.
if(ignoreSelfLinks &&
(urlToPS1(url) === pagePS1) &&
!urlShortenerWithContentMatchPatternSet.matches(url)) {
anchorElements.set(anchorElement, { observing: false });
return;
}
// Check if the link is too small, and if it is,
// don't observe the link
// Note: we only measure element size once because
// getBoundingClientRect is expensive and links rarely
// change size
const elementRect = anchorElement.getBoundingClientRect();
if ((elementRect.width < linkMinimumWidth) ||
(elementRect.height < linkMinimumHeight)) {
anchorElements.set(anchorElement, { observing: false });
return;
}
// Start observing the link
anchorElements.set(anchorElement, {
observing: true,
url,
totalTimeExposed: 0,
inViewport: false,
lastExposureStartTime: -1
});
observer.observe(anchorElement);
return;
}
// If we have seen this link before, update the user's exposure to the link
updateLinkExposure(timeStamp, anchorElement, linkInfo);
});
notifyBackgroundScript();
// If the page does not have attention and we're confident that the page did not recently have attention, stop ticking the timer
if (!pageManager.pageHasAttention && ((lastLostAttention < 0) || (lastLostAttention + attentionIdlePeriod < timeStamp))) {
clearInterval(timerId);
timerId = 0;
return;
}
}
/**
* Notify the background script of any exposed links.
*/
function notifyBackgroundScript() {
if (exposedLinkURLs.length > 0) {
browser.runtime.sendMessage({
type: "webScience.linkExposure.linkExposureUpdate",
pageId: pageManager.pageId,
url: pageManager.url,
privateWindow: browser.extension.inIncognitoContext,
linkUrls: exposedLinkURLs
});
exposedLinkURLs = [];
}
}
/**
* An IntersectionObserver callback for anchor elements.
* @param {IntersectionObserverEntry[]} entries - Updates from the IntersectionObserver that is observing anchor elements.
*/
function anchorObserverCallback(entries) {
const timeStamp = timing.now();
entries.forEach(entry => {
const anchorElement = entry.target;
const linkInfo = anchorElements.get(anchorElement);
// Update whether the link is in the viewport, applying the minimum visibility threshold
linkInfo.inViewport = entry.intersectionRatio >= linkMinimumVisibility;
// Update the user's exposure to the link
updateLinkExposure(timeStamp, anchorElement, linkInfo);
});
}
/**
* An IntersectionObserver for checking link visibility.
* @constant {IntersectionObserver}
*/
const observer = new IntersectionObserver(anchorObserverCallback, { threshold: linkMinimumVisibility });
/**
* A listener for pageManager.onPageVisitStart. Resets page-specific data and starts the
* timer ticking.
*/
function pageVisitStartListener () {
// Reset page-specific data
lastLostAttention = -1;
anchorElements = new WeakMap();
pagePS1 = urlToPS1(pageManager.url);
exposedLinkURLs = [];
// Start the timer ticking
timerId = setInterval(timerTick, updateInterval);
}
/**
* A listener for pageManager.onPageVisitStop. Clears the timer and intersection observer.
*/
function pageVisitStopListener() {
// There might be links queued for reporting, so report them
notifyBackgroundScript();
clearInterval(timerId);
timerId = 0;
observer.disconnect();
}
/**
* A listener for pageManager.onPageAttentionUpdate.
 * @param {Object} details - Details about the attention update.
 * @param {number} details.timeStamp - The time when the attention update occurred.
*/
function pageAttentionUpdateListener({ timeStamp }) {
// If the page has gained attention, and the timer isn't ticking, start ticking
if(pageManager.pageHasAttention && (timerId <= 0)) {
timerId = setInterval(timerTick, updateInterval);
}
// If the page has lost attention, save the timestamp
if(!pageManager.pageHasAttention) {
lastLostAttention = timeStamp;
}
// Iterate all the links currently on the page and update link exposure
forEachAnchorElement(anchorElement => {
const linkInfo = anchorElements.get(anchorElement);
if(linkInfo === undefined) {
return;
}
updateLinkExposure(timeStamp, anchorElement, linkInfo);
});
}
// Wait for pageManager load
function pageManagerLoaded() {
pageManager = window.webScience.pageManager;
pageManager.onPageVisitStart.addListener(pageVisitStartListener);
if(pageManager.pageVisitStarted) {
pageVisitStartListener();
}
pageManager.onPageVisitStop.addListener(pageVisitStopListener);
pageManager.onPageAttentionUpdate.addListener(pageAttentionUpdateListener);
}
if (("webScience" in window) && ("pageManager" in window.webScience))
pageManagerLoaded();
else {
if(!("pageManagerHasLoaded" in window))
window.pageManagerHasLoaded = [];
window.pageManagerHasLoaded.push(pageManagerLoaded);
}
})();
<|start_filename|>tests/build/manifest.json<|end_filename|>
{
"description": "A build test for the WebScience library.",
"author": "Mozilla",
"manifest_version": 2,
"name": "WebScience Build Test",
"version": "1.0.0",
"homepage_url": "https://github.com/mozilla-rally/web-science",
"browser_specific_settings": {
"gecko": {
"id": "<EMAIL>",
"strict_min_version": "87.0"
}
},
"background": {
"scripts": ["dist/background.js"]
},
"incognito": "not_allowed"
}
<|start_filename|>src/linkExposure.js<|end_filename|>
/**
* This module enables measuring user exposure to linked content. See the
* `onLinkExposureData` and `onLinkExposureUpdate` events for specifics.
* There is an important difference between these events: `onLinkExposureData`
* fires once per page with a complete set of link exposure data, while
* `onLinkExposureUpdate` fires throughout a page's lifespan as link exposures
* occur. For most use cases, `onLinkExposureData` is the right event to use.
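 *
 * @example
 * // A minimal sketch of listening for link exposure data; the match
 * // patterns here are illustrative assumptions.
 * webScience.linkExposure.onLinkExposureData.addListener(linkExposureData => {
 *     console.log(linkExposureData.matchingLinkUrls, linkExposureData.nonmatchingLinkCount);
 * }, {
 *     linkMatchPatterns: [ "*://*.example.com/*" ],
 *     pageMatchPatterns: [ "*://*.example.org/*" ]
 * });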
*
* @module linkExposure
*/
import * as events from "./events.js";
import * as linkResolution from "./linkResolution.js";
import * as matching from "./matching.js";
import * as messaging from "./messaging.js";
import * as pageManager from "./pageManager.js";
import * as permissions from "./permissions.js";
import linkExposureContentScript from "include:./content-scripts/linkExposure.content.js";
/**
* Ignore links where the link URL PS+1 is identical to the page URL PS+1.
* Note that there is another ignoreSelfLinks constant in the linkExposure
* content script, and these two constants should have the same value.
* @constant {boolean}
* @private
*/
const ignoreSelfLinks = true;
/**
* The details of a link exposure update event. This private type must be kept in
* sync with the public `linkExposureUpdateListener` type.
* @typedef {Object} LinkExposureUpdateDetails
* @property {string} pageId - The ID for the page, unique across browsing sessions.
* @property {string} url - The URL of the page, without any hash.
* @property {string[]} matchingLinkUrls - An array containing the resolved URLs of links
* on the page that the user was exposed to and that matched a provided match pattern.
* @property {number} nonmatchingLinkCount - The number of resolved links on the page that
* the user was exposed to and that did not match a provided match pattern.
* @private
*/
/**
* A listener for the `onLinkExposureUpdate` event.
* @callback linkExposureUpdateListener
* @memberof module:linkExposure.onLinkExposureUpdate
* @param {Object} details - Additional information about the link
* exposure update event.
* @param {string} details.pageId - The ID for the page, unique across browsing sessions.
* @param {string} details.url - The URL of the page, without any hash.
* @param {string[]} details.matchingLinkUrls - An array containing the resolved URLs of links
* on the page that the user was exposed to and that matched a provided match pattern.
* @param {number} details.nonmatchingLinkCount - The number of resolved links on the page that
* the user was exposed to and that did not match a provided match pattern.
*/
/**
* Options when adding a listener for the `onLinkExposureUpdate` event. This
* private type must be kept in sync with the public `onLinkExposureUpdate.addListener`
* type.
* @typedef {Object} LinkExposureUpdateOptions
* @property {string[]} linkMatchPatterns - Match patterns for links where the listener
* should receive individual resolved URLs. Links that do not match this match pattern are
* included in an aggregate count.
* @property {string[]} pageMatchPatterns - Match patterns for pages where the listener
* should be provided link exposure data.
* @property {boolean} [privateWindows=false] - Whether to measure links in private windows.
* @private
*/
/**
* @typedef {Object} LinkExposureUpdateListenerRecord
* @property {matching.MatchPatternSet} linkMatchPatternSet - The match patterns for link URLs.
* @property {matching.MatchPatternSet} pageMatchPatternSet - The match patterns for pages.
* @property {boolean} privateWindows - Whether to report exposures in private windows.
* @property {browser.contentScripts.RegisteredContentScript} contentScript - The content
* script associated with the listener.
* @private
*/
/**
* A map where each key is a listener and each value is a record for that listener.
* @constant {Map<linkExposureUpdateListener, LinkExposureUpdateListenerRecord>}
* @private
*/
const linkExposureUpdateListeners = new Map();
/**
* A map where each key is a page ID and each value is a count of pending page link exposure updates
* waiting on link resolution.
* @constant {Map<string, number>}
* @private
*/
const pendingPageLinkExposureUpdates = new Map();
/**
* A map where each key is a page ID and each value is a callback function that is fired when there
* are no more pending link exposure updates for the page ID.
* @constant {Map<string, Function>}
* @private
*/
const pendingPageLinkExposureCallbacks = new Map();
/**
* Add a listener for the `onLinkExposureUpdate` event.
* @function addListener
* @memberof module:linkExposure.onLinkExposureUpdate
* @param {linkExposureUpdateListener} listener - The listener to add.
* @param {Object} options - Options for the listener.
* @param {string[]} options.linkMatchPatterns - Match patterns for links where the listener
* should receive individual resolved URLs. Links that do not match this match pattern are
* included in an aggregate count.
* @param {string[]} options.pageMatchPatterns - Match patterns for pages where the listener
* should be provided link exposure data.
* @param {boolean} [options.privateWindows=false] - Whether to measure links in private windows.
*/
/**
* Remove a listener for the `onLinkExposureUpdate` event.
* @function removeListener
* @memberof module:linkExposure.onLinkExposureUpdate
* @param {linkExposureUpdateListener} listener - The listener to remove.
*/
/**
* Whether a specified listener has been added for the `onLinkExposureUpdate` event.
* @function hasListener
* @memberof module:linkExposure.onLinkExposureUpdate
* @param {linkExposureUpdateListener} listener - The listener to check.
* @returns {boolean} Whether the listener has been added for the event.
*/
/**
* Whether the `onLinkExposureUpdate` event has any listeners.
* @function hasAnyListeners
* @memberof module:linkExposure.onLinkExposureUpdate
* @returns {boolean} Whether the event has any listeners.
*/
/**
* An event that fires when data about link exposures on a page is available. This event can fire multiple
* times for one page, as link exposures occur and the URLs for those links are resolved.
* @namespace
*/
export const onLinkExposureUpdate = events.createEvent({
name: "webScience.linkExposure.onLinkExposureUpdate",
addListenerCallback: addUpdateListener,
removeListenerCallback: removeUpdateListener,
notifyListenersCallback: () => { return false; }
});
/**
* Whether the module has been initialized by checking permissions and adding a
* messaging.onMessage listener.
* @type {boolean}
* @private
*/
let initialized = false;
/**
* Callback for adding an onLinkExposureUpdate listener.
* @param {linkExposureUpdateListener} listener - The listener.
* @param {LinkExposureUpdateOptions} options - A set of options for the listener.
* @private
*/
async function addUpdateListener(listener, { linkMatchPatterns, pageMatchPatterns, privateWindows = false }) {
// Initialization
await pageManager.initialize();
if(!initialized) {
initialized = true;
permissions.check({
module: "webScience.linkExposure",
requiredPermissions: [ "storage" ],
suggestedPermissions: [ "unlimitedStorage" ]
});
messaging.onMessage.addListener(messageListener, {
type: "webScience.linkExposure.linkExposureUpdate",
schema: {
pageId: "string",
url: "string",
privateWindow: "boolean",
linkUrls: "object"
}
});
}
// Compile the match patterns for link URLs and page URLs
const linkMatchPatternSet = matching.createMatchPatternSet(linkMatchPatterns);
const pageMatchPatternSet = matching.createMatchPatternSet(pageMatchPatterns);
// Register a content script for the page URLs
const contentScript = await browser.contentScripts.register({
matches: pageMatchPatterns,
js: [{
file: linkExposureContentScript
}],
runAt: "document_idle"
});
// Store the listener information in a record
linkExposureUpdateListeners.set(listener, {
linkMatchPatternSet,
pageMatchPatternSet,
privateWindows,
contentScript
});
}
/**
* Callback for removing an onLinkExposureUpdate listener.
* @param {linkExposureUpdateListener} listener - The listener that is being removed.
* @private
*/
function removeUpdateListener(listener) {
// If the listener has a record, unregister its content script and delete
// the record
const listenerRecord = linkExposureUpdateListeners.get(listener);
if(listenerRecord !== undefined) {
listenerRecord.contentScript.unregister();
linkExposureUpdateListeners.delete(listener);
}
}
/**
* Callback for a link exposure update message from the content script.
 * @param {Object} linkExposureUpdate - The update message.
* @param {string} linkExposureUpdate.pageId - The page ID for the page where
* the content script is running.
* @param {string} linkExposureUpdate.url - The URL, without a hash, for the page
* where the content script is running.
* @param {boolean} linkExposureUpdate.privateWindow - Whether the page where the
* content script is running is in a private window.
* @param {string[]} linkExposureUpdate.linkUrls - The links on the page that the
* user was exposed to.
* @private
*/
function messageListener({ pageId, url, privateWindow, linkUrls }) {
// Increment the count of pending link exposure updates for the page
let pendingLinkExposureCount = pendingPageLinkExposureUpdates.get(pageId);
pendingLinkExposureCount = pendingLinkExposureCount === undefined ? 1 : pendingLinkExposureCount + 1;
pendingPageLinkExposureUpdates.set(pageId, pendingLinkExposureCount);
// Resolve all the link URLs in the update, converting each URL into a
// Promise<string>
const resolvedLinkUrlPromises = linkUrls.map((linkUrl) => {
return linkResolution.resolveUrl(linkUrl);
});
// Once resolution is complete, notify the linkExposureUpdate listeners
Promise.allSettled(resolvedLinkUrlPromises).then(async (results) => {
// For each link URL, if we have a resolved URL, use that
// If we don't have a resolved URL, use the original URL with
// cache, shim, and link decoration parsing
for(const i of linkUrls.keys()) {
if(results[i].status === "fulfilled") {
linkUrls[i] = results[i].value;
}
else {
linkUrls[i] = await linkResolution.resolveUrl(linkUrls[i], { request: "none" });
}
}
// If we are ignoring self links, determine whether each link URL is a self link
// by comparing to the page URL's public suffix + 1
// These are links that do not appear to be self links in the content
// script, but resolve to self links
let selfLinks = null;
if(ignoreSelfLinks) {
const pagePS1 = linkResolution.urlToPS1(url);
selfLinks = linkUrls.map(linkUrl => pagePS1 === linkResolution.urlToPS1(linkUrl));
}
// Notify the listeners
for(const [listener, listenerRecord] of linkExposureUpdateListeners) {
// Check private window and page match pattern requirements for the listener
if((!privateWindow || listenerRecord.privateWindows) &&
listenerRecord.pageMatchPatternSet.matches(url)) {
const matchingLinkUrls = [];
let nonmatchingLinkCount = 0;
for(const i of linkUrls.keys()) {
// If we are ignoring self links and a resolved link URL is a self link,
// ignore the resolved link URL
if(ignoreSelfLinks && selfLinks[i]) {
continue;
}
// Queue the link for reporting to the listener, either as a URL (if matching)
// or in a count (if nonmatching)
const linkUrl = linkUrls[i];
if(listenerRecord.linkMatchPatternSet.matches(linkUrl)) {
matchingLinkUrls.push(linkUrl);
}
else {
nonmatchingLinkCount++;
}
}
listener({
pageId,
url,
matchingLinkUrls,
nonmatchingLinkCount
});
}
}
// Decrement the count of pending link exposure updates for the page
pendingLinkExposureCount = pendingPageLinkExposureUpdates.get(pageId) - 1;
if(pendingLinkExposureCount > 0) {
pendingPageLinkExposureUpdates.set(pageId, pendingLinkExposureCount);
}
else {
pendingPageLinkExposureUpdates.delete(pageId);
}
// If there are no more pending link exposures for the page and there's a
// callback for when the page has no more pending link exposures, call the
// callback and remove it
if(pendingLinkExposureCount <= 0) {
const callback = pendingPageLinkExposureCallbacks.get(pageId);
if(callback !== undefined) {
callback();
}
pendingPageLinkExposureCallbacks.delete(pageId);
}
});
}
/**
* The details of a link exposure data event. This private type must be kept in sync with
* the public `linkExposureDataListener` type.
* @typedef {Object} LinkExposureDataDetails
* @property {string} pageId - The ID for the page, unique across browsing sessions.
* @property {string} url - The URL of the page, without any hash.
* @property {string[]} matchingLinkUrls - An array containing the resolved URLs of links
* on the page that the user was exposed to and that matched a provided match pattern.
* @property {number} nonmatchingLinkCount - The number of resolved links on the page that
* the user was exposed to and that did not match a provided match pattern.
* @private
*/
/**
* A callback function for the link exposure data event.
* @callback linkExposureDataListener
* @memberof module:linkExposure.onLinkExposureData
 * @param {Object} details - Additional information about the link exposure data event.
* @param {string} details.pageId - The ID for the page, unique across browsing sessions.
* @param {string} details.url - The URL of the page, without any hash.
* @param {string[]} details.matchingLinkUrls - An array containing the resolved URLs of links
* on the page that the user was exposed to and that matched a provided match pattern.
* @param {number} details.nonmatchingLinkCount - The number of resolved links on the page that
* the user was exposed to and that did not match a provided match pattern.
*/
/**
* Options when adding a listener for the `onLinkExposureData` event. This private type must
* be kept in sync with the public `onLinkExposureData.addListener` type.
* @typedef {Object} LinkExposureDataOptions
* @property {string[]} linkMatchPatterns - Match patterns for links where the listener
* should receive individual resolved URLs. Links that do not match this match pattern are
* included in an aggregate count.
* @property {string[]} pageMatchPatterns - Match patterns for pages where the listener
* should be provided link exposure data.
* @property {boolean} [privateWindows=false] - Whether to measure links in private windows.
* @private
*/
/**
* @typedef {Object} LinkExposureDataListenerRecord
* @property {linkExposureUpdateListener} linkExposureUpdateListener - The listener for onLinkExposureUpdate
* that was created for this onLinkExposureData listener.
* @property {Map<string,LinkExposureDataDetails>} pageLinkExposureData - A map where keys are page IDs and values
* are LinkExposureDataDetails reflecting partial link exposure data for a page.
* @private
*/
/**
* A map where each key is a listener and each value is a record for that listener.
* @constant {Map<linkExposureDataListener, LinkExposureDataListenerRecord>}
* @private
*/
const linkExposureDataListeners = new Map();
/**
* Add a listener for the `onLinkExposureData` event.
* @function addListener
* @memberof module:linkExposure.onLinkExposureData
* @param {linkExposureDataListener} listener - The listener to add.
* @param {Object} options - Options for the listener.
* @param {string[]} options.linkMatchPatterns - Match patterns for links where the listener
* should receive individual resolved URLs. Links that do not match this match pattern are
* included in an aggregate count.
* @param {string[]} options.pageMatchPatterns - Match patterns for pages where the listener
* should be provided link exposure data.
* @param {boolean} [options.privateWindows=false] - Whether to measure links in private windows.
*/
/**
* Remove a listener for the `onLinkExposureData` event.
* @function removeListener
* @memberof module:linkExposure.onLinkExposureData
* @param {linkExposureDataListener} listener - The listener to remove.
*/
/**
* Whether a specified listener has been added for the `onLinkExposureData` event.
* @function hasListener
* @memberof module:linkExposure.onLinkExposureData
* @param {linkExposureDataListener} listener - The listener to check.
* @returns {boolean} Whether the listener has been added for the event.
*/
/**
* Whether the `onLinkExposureData` event has any listeners.
* @function hasAnyListeners
* @memberof module:linkExposure.onLinkExposureData
* @returns {boolean} Whether the event has any listeners.
*/
/**
* Whether the pageManager.onPageVisitStart and pageManager.onPageVisitStop listeners have been added.
* @type {boolean}
* @private
*/
let addedPageVisitListeners = false;
/**
* An event that fires when a complete set of data about link exposures on a page is available. This event
* only fires once per page, after the page visit has ended.
* @namespace
*/
export const onLinkExposureData = events.createEvent({
name: "webScience.linkExposure.onLinkExposureData",
addListenerCallback: addDataListener,
removeListenerCallback: removeDataListener,
notifyListenersCallback: () => { return false; }
});
/**
* A short period of time to wait, in milliseconds, after the onPageVisitStop event before attempting the
* onLinkExposureData event. We need to wait a short period because there can be lingering
* onLinkExposureUpdate events after onPageVisitStop (e.g., links that are still getting resolved or a
* final message from the linkExposure content script when the page visit ends).
* @constant {number}
* @private
*/
const pageVisitStopDelay = 500;
/**
* Callback for adding an onLinkExposureData listener.
* @param {linkExposureDataListener} listener - The listener.
* @param {LinkExposureDataOptions} options - A set of options for the listener.
* @private
*/
async function addDataListener(listener, options) {
if(!addedPageVisitListeners) {
// When a page visit starts, for each link exposure data listener with a matching page match pattern,
// create an object to accumulate link exposures on that page
pageManager.onPageVisitStart.addListener(pageVisitStartDetails => {
for(const linkExposureDataListenerRecord of linkExposureDataListeners.values()) {
const linkExposureUpdateListenerRecord = linkExposureUpdateListeners.get(linkExposureDataListenerRecord.linkExposureUpdateListener);
if(linkExposureUpdateListenerRecord.pageMatchPatternSet.matches(pageVisitStartDetails.url)) {
linkExposureDataListenerRecord.pageLinkExposureData.set(pageVisitStartDetails.pageId, {
pageId: pageVisitStartDetails.pageId,
url: pageVisitStartDetails.url,
matchingLinkUrls: [],
nonmatchingLinkCount: 0
});
}
}
});
// When a page visit ends, wait a short period because link resolution might still be pending
pageManager.onPageVisitStop.addListener(pageVisitStopDetails => {
setTimeout(() => {
// Create a callback function to notify onLinkExposureData listeners about the link exposures on the page
// and delete the store of aggregated link exposures
const notifyListeners = () => {
for(const [linkExposureDataListener, linkExposureDataListenerRecord] of linkExposureDataListeners) {
const linkExposureDataForPage = linkExposureDataListenerRecord.pageLinkExposureData.get(pageVisitStopDetails.pageId);
// If there's at least one link exposure to report on the page, notify the listener
if(linkExposureDataForPage !== undefined) {
if((linkExposureDataForPage.matchingLinkUrls.length > 0) || (linkExposureDataForPage.nonmatchingLinkCount > 0)) {
linkExposureDataListener(linkExposureDataForPage);
}
// Delete the listener's accumulated link exposure data for the page
linkExposureDataListenerRecord.pageLinkExposureData.delete(pageVisitStopDetails.pageId);
}
}
};
// If there are no pending link exposure updates for the page, immediately call the callback function
if(!pendingPageLinkExposureUpdates.has(pageVisitStopDetails.pageId)) {
notifyListeners();
}
// Otherwise, set the callback function to be called when there are no more pending link exposures for
// the page
else {
pendingPageLinkExposureCallbacks.set(pageVisitStopDetails.pageId, notifyListeners);
}
}, pageVisitStopDelay);
});
addedPageVisitListeners = true;
}
// Create a record of the onLinkExposureData listener, including a new onLinkExposureUpdate listener
const linkExposureDataListenerRecord = {
pageLinkExposureData: new Map(),
// When the onLinkExposureUpdate listener fires for this onLinkExposureData listener, accumulate
// the link exposures on the page for this listener
linkExposureUpdateListener: linkExposureUpdateDetails => {
const linkExposureDataForPage = linkExposureDataListenerRecord.pageLinkExposureData.get(linkExposureUpdateDetails.pageId);
if(linkExposureDataForPage !== undefined) {
linkExposureDataForPage.matchingLinkUrls = linkExposureDataForPage.matchingLinkUrls.concat(linkExposureUpdateDetails.matchingLinkUrls);
linkExposureDataForPage.nonmatchingLinkCount += linkExposureUpdateDetails.nonmatchingLinkCount;
}
}
};
linkExposureDataListeners.set(listener, linkExposureDataListenerRecord);
onLinkExposureUpdate.addListener(linkExposureDataListenerRecord.linkExposureUpdateListener, options);
}
/**
* Callback for removing an onLinkExposureData listener.
* @param {linkExposureDataListener} listener - The listener that is being removed.
* @private
*/
function removeDataListener(listener) {
// If the listener has a record, unregister its onLinkExposureUpdate listener
// and delete the record
const listenerRecord = linkExposureDataListeners.get(listener);
if(listenerRecord !== undefined) {
onLinkExposureUpdate.removeListener(listenerRecord.linkExposureUpdateListener);
linkExposureDataListeners.delete(listener);
}
}
<|start_filename|>rollup-plugin.js<|end_filename|>
const fs = require("fs");
const path = require("path");
const rollup = require("rollup");
const commonjs = require("@rollup/plugin-commonjs");
const nodeResolve = require("@rollup/plugin-node-resolve").nodeResolve;
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
/**
* The WebScience Rollup plugin. This plugin is necessary because certain WebScience assets (e.g., content
* scripts and HTML) are not handled by ordinary Rollup bundling. Developers building browser extensions
* with WebScience should, for all anticipated use cases, be able to use the Rollup plugin as-is.
*
* The plugin involves the following steps.
* * Identify WebScience asset dependencies, which are import statements that begin with an "include:" schema.
* * Resolve each WebScience asset dependency to a string containing an output path relative to the extension
* base directory. This format is required by WebExtensions APIs.
* * Allow Rollup tree shaking to occur, so that only necessary asset dependencies are bundled.
* * If an asset dependency is a content script (ending in .content.js), use Rollup to bundle the content script
* to the output directory in IIFE format, with support for Node module resolution and CommonJS module wrapping.
* This step allows WebScience content scripts to use components of the library and npm dependencies, and it
* makes debugging content script behavior straightforward.
* * If an asset dependency is an HTML file (ending in .html), copy the file to the output directory, parse the file,
* identify script and stylesheet dependencies (`<script src="..."></script>` and `<link rel="stylesheet" href="...">`),
* and copy those additional dependencies to the output directory. The plugin currently only supports additional HTML
* dependencies that are in the same source directory as the HTML file.
* * If an asset dependency is not one of the above types, copy the file to the output directory.
* @param {Object} [options] - Options for the plugin.
* @param {string} [options.manifestPath="./manifest.json"] - The path to the WebExtensions manifest.json, either
* absolute or relative to the current working directory. The plugin requires this path so that it can generate
* paths relative to the extension base directory for WebExtensions APIs.
* @param {string} [options.outputDirectory="./dist/webScience/"] - The directory where the plugin should output
* required dependencies, either absolute or relative to the current working directory. This directory must
* be equal to or a subdirectory of the directory containing the WebExtensions manifest.json.
* @returns {Object} - The generated Rollup plugin.
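 * @example
 * // A minimal rollup.config.js sketch; the entry point and output paths
 * // are illustrative assumptions.
 * import webScienceRollupPlugin from "./rollup-plugin.js";
 * import commonjs from "@rollup/plugin-commonjs";
 * import { nodeResolve } from "@rollup/plugin-node-resolve";
 * export default {
 *     input: "src/background.js",
 *     output: { file: "dist/background.js", format: "iife" },
 *     plugins: [ webScienceRollupPlugin(), commonjs(), nodeResolve({ browser: true }) ]
 * };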
*/
module.exports = function webScienceRollupPlugin({
manifestPath = "./manifest.json",
outputDirectory = "./dist/webScience/"
} = {
manifestPath: "./manifest.json",
outputDirectory: "./dist/webScience/"
}) {
const includeScheme = "include:";
// If the manifest path or output directory is relative, convert it to absolute with the current working directory
manifestPath = path.resolve(process.cwd(), manifestPath);
const manifestDirectory = path.dirname(manifestPath);
outputDirectory = path.resolve(process.cwd(), outputDirectory) + path.sep;
// Check that the output directory is either the manifest directory or a subdirectory of the manifest directory
if(path.relative(manifestDirectory, outputDirectory).startsWith("..")) {
throw new Error("Error: the Webscience Rollup plugin requires that the output directory be either the same as the extension manifest directory or a subdirectory of that directory.");
}
// Check that the WebExtensions manifest path is correct, since we need to generate paths relative to the manifest
if(!fs.existsSync(manifestPath)) {
throw new Error(`Error: the WebScience Rollup plugin requires either running Rollup in the base directory for the extension or specifying an extension manifest path.`);
}
// Check that the output directory is a directory path
if(!outputDirectory.endsWith(path.sep)) {
throw new Error("Error: the Webscience Rollup plugin requires a valid directory path.")
}
// If the output directory doesn't already exist, create it
if(!fs.existsSync(outputDirectory)) {
fs.mkdirSync(outputDirectory, {
recursive: true
});
}
/**
* Generate the absolute file input path, absolute file output path, and output path relative to the WebExtensions manifest
* directory for an import ID that uses the include scheme.
*/
const pathsFromId = id => {
// Remove the scheme to obtain the absolute file input path
const inputPath = id.substring(includeScheme.length);
// Add the file name to the output directory to obtain the absolute file output path
const outputPath = outputDirectory + path.basename(inputPath);
// Generate the output path relative to the WebExtensions manifest directory, since that's what WebExtensions APIs require
const relativeOutputPath = path.relative(manifestDirectory, outputPath);
return { inputPath, outputPath, relativeOutputPath };
};
const plugin = {
name: "webscience-rollup-plugin",
// When the Rollup build starts, check that plugin dependencies are present
buildStart({ plugins }) {
const pluginDependencies = {
"commonjs": "@rollup/plugin-commonjs",
"node-resolve": "@rollup/plugin-node-resolve"
};
const pluginNames = new Set();
for(const plugin of plugins) {
pluginNames.add(plugin.name);
}
for(const pluginName in pluginDependencies) {
if(!pluginNames.has(pluginName)) {
throw new Error(`Error: bundling with the WebScience library requires ${pluginDependencies[pluginName]}.`);
}
}
},
async resolveId(source, importer) {
// Ignore bundling entry points
if(!importer) {
return null;
}
// Ignore import statements that don't start with the include scheme
if(!source.startsWith(includeScheme)) {
return null;
}
// Remove the scheme, resolve the absolute path for the import, then restore the scheme
source = source.substring(includeScheme.length);
const resolution = await this.resolve(source, importer, { skipSelf: true });
if(resolution === null || !("id" in resolution)) {
throw new Error(`Error: unable to resolve WebScience dependency: ${source}.`);
}
return includeScheme + resolution.id;
},
async load(id) {
// Ignore resolved import statements that don't start with the include scheme
if(!id.startsWith(includeScheme)) {
return null;
}
// Return a string that is the output path relative to the WebExtensions manifest directory, since that's what WebExtensions APIs require
return `export default "${pathsFromId(id).relativeOutputPath}";`;
},
// Generate output files with the generateBundle hook, rather than the load hook, so we can benefit from tree shaking
async generateBundle(options, bundle) {
// Identify all the import IDs with the include scheme
const idsWithIncludeScheme = new Set();
for(const info of Object.values(bundle)) {
if(info.type === "chunk") {
for(const id in info.modules) {
if(id.startsWith(includeScheme)) {
idsWithIncludeScheme.add(id);
}
}
}
}
// Generate output files for each import ID with the include scheme
for(const id of idsWithIncludeScheme) {
const { inputPath, outputPath } = pathsFromId(id);
// If the file is a content script (i.e., ends with .content.js), bundle it to the output directory in IIFE format
if(inputPath.endsWith(".content.js")) {
const outputBundle = await rollup.rollup({
input: inputPath,
plugins: [
// If we encounter an import with the include scheme when bundling, just return an empty string and let Rollup tree shake the import
{
name: "ignore-include-scheme",
resolveId: plugin.resolveId,
load(id) {
if(id.startsWith(includeScheme)) {
return `export default "";`;
}
}
},
commonjs(),
nodeResolve({
browser: true,
moduleDirectories: [
process.cwd() + path.sep + "node_modules"
]
})
]
});
await outputBundle.write({
output: {
file: outputPath,
format: "iife"
}
});
await outputBundle.close();
}
// If the file is HTML (i.e., ends with .html), copy the file and any script or stylesheet dependencies
else if(inputPath.endsWith(".html")) {
// Copy the HTML file
fs.copyFileSync(inputPath, outputPath);
// Parse the HTML file and extract script and stylesheet dependency paths
const html = fs.readFileSync(inputPath, "utf8");
const dom = new JSDOM(html);
const embeddedFileRelativePaths = new Set();
const scriptElements = dom.window.document.querySelectorAll("script[src]");
for(const scriptElement of scriptElements) {
embeddedFileRelativePaths.add(scriptElement.src);
}
const stylesheetElements = dom.window.document.querySelectorAll("link[rel=stylesheet][href]");
for(const stylesheetElement of stylesheetElements) {
embeddedFileRelativePaths.add(stylesheetElement.href);
}
// Generate output paths for dependencies and copy the files
for(const embeddedFileRelativePath of embeddedFileRelativePaths) {
const embeddedFileAbsolutePath = path.resolve(path.dirname(inputPath), embeddedFileRelativePath);
if(path.dirname(embeddedFileAbsolutePath) === path.dirname(inputPath)) {
fs.copyFileSync(embeddedFileAbsolutePath, outputDirectory + path.basename(embeddedFileAbsolutePath));
}
else {
console.warn(`Warning: the WebScience Rollup plugin only supports HTML script and stylesheet embeds in the same directory as the HTML file. Unable to embed ${embeddedFileAbsolutePath} in ${outputPath}.`);
}
}
}
// For all other file types, copy the file to the output directory
else {
fs.copyFileSync(inputPath, outputPath);
}
}
}
};
return plugin;
};
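// Illustrative usage (a hedged sketch, not part of this module): a consuming
// extension's rollup.config.js might wire the plugin up roughly as follows.
// The factory name `webScienceRollupPlugin` and all of the paths below are
// assumptions for the example only.
//
//   import webScienceRollupPlugin from "./webscience-rollup-plugin.js";
//   import commonjs from "@rollup/plugin-commonjs";
//   import { nodeResolve } from "@rollup/plugin-node-resolve";
//
//   export default {
//     input: "src/background.js",
//     output: { file: "dist/background.js", format: "es" },
//     // commonjs and node-resolve must both be present, since buildStart()
//     // above throws if either is missing from the plugin list.
//     plugins: [ webScienceRollupPlugin(), commonjs(), nodeResolve({ browser: true }) ]
//   };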
| throneless-tech/web-science |
<|start_filename|>mm-mailing-list.html<|end_filename|>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<HTML LANG="EN-US">
<HEAD>
<!-- improve mobile display -->
<META NAME="viewport" CONTENT="width=device-width, initial-scale=1.0">
<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
<TITLE>Metamath Mailing List</TITLE>
<LINK REL="shortcut icon" HREF="favicon.ico" TYPE="image/x-icon">
</HEAD>
<BODY BGCOLOR="#FFFFFF" STYLE="padding: 0px 8px">
<H1>Metamath Mailing List</H1>
<P>
We welcome everyone who is interested in Metamath to join the
<a href="https://groups.google.com/forum/#!forum/metamath">Metamath
mailing list</a>, which discusses all topics related to Metamath.
Metamath is a tiny language that can express theorems in abstract mathematics, accompanied by proofs that can be verified by a computer program.
</P>
<P>
We ask that you follow common netiquette rules for mailing lists. In short, respect others' time. For example:
</P>
<UL>
<LI>Use a descriptive subject that describes the content of your post. If you're changing to a new subject, please start a new thread with the new subject. You can show a relationship with a previous subject by using a subject like NEW_SUBJECT (was: OLD_SUBJECT).</LI>
<LI>Please avoid replying to yourself (unless it's just occasionally for a quick correction), and allow others time to respond. The point of a mailing list is to enable discussion, not monologue.</LI>
<LI>Avoid unnecessary material. Don't just post me-too posts, and quote only what you need to make your point clear.</LI>
<LI>Use bottom-posting or interleaved quoting. This makes posts easier to understand.</LI>
<LI>Try to post in normal text and *not* rich text. Posting in HTML is allowed, but don't assume that everyone reading the message can see the formatting.</LI>
<LI>Don't shout (that is, post in all-caps).</LI>
<LI>Avoid personal attacks. Disagreement on ideas is fine (and expected).</LI>
</UL>
<P>
There may be occasional lapses; the point is that everyone should *try* to respect others' time.
</P>
<P>
Postings that do not follow these guidelines are subject to removal, and posters who repeatedly do not follow these guidelines are subject to added moderation or removal. We need to be able to do this to ensure that the mailing list meets its objective.
</P>
<P>
Some additional guidelines and clarifications can be found at:
</P>
<UL>
<LI>
<A HREF="https://en.opensuse.org/openSUSE:Mailing_list_netiquette">openSUSE:Mailing list netiquette</A></LI>
<LI>
<A HREF="https://lifehacker.com/basic-etiquette-for-email-lists-and-forums-5473859">Basic Etiquette for Email Lists and Forums</A></LI>
<LI>
<A HREF="http://linux.sgms-centre.com/misc/netiquette.php">USENET and Mailing List posting netiquette</A></LI>
</UL>
</BODY>
</HTML>
<|start_filename|>mmnf.raw.html<|end_filename|>
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<HTML LANG="EN-US">
<HEAD>
<!-- improve mobile display -->
<META NAME="viewport" CONTENT="width=device-width, initial-scale=1.0">
<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
<TITLE>Home Page - New Foundations Explorer</TITLE>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<LINK REL="shortcut icon" HREF="favicon.ico" TYPE="image/x-icon">
<STYLE TYPE="text/css">
<!--
/* Math symbol image will be shifted down 4 pixels to align with normal
text for compatibility with various browsers. The old ALIGN=TOP for
math symbol images did not align in all browsers and should be deleted.
All other images must override this shift with STYLE="margin-bottom:0px".
(2-Oct-2015 nm) */
img { margin-bottom: -4px }
-->
</STYLE>
</HEAD>
<!-- <BODY BGCOLOR="#EEFFFA"> -->
<BODY BGCOLOR="#FFFFFF" STYLE="padding: 0px 8px">
<TABLE BORDER=0 CELLSPACING=0 CELLPADDING=0 WIDTH="100%">
<TR>
<TD ALIGN=LEFT VALIGN=TOP><A HREF="../index.html"><IMG SRC="nf.gif"
BORDER=0
ALT="Metamath Home"
TITLE="Metamath Home"
HEIGHT=32 WIDTH=32 ALIGN=TOP STYLE="margin-bottom:0px"></A>
</TD>
<TD ALIGN=CENTER VALIGN=TOP><FONT SIZE="+3"
COLOR="#006633"><B>New Foundations Explorer Home Page</B></FONT>
</TD>
<TD NOWRAP ALIGN=RIGHT VALIGN=TOP><FONT SIZE=-2
FACE=sans-serif> <A HREF="wn.html">First ></A><BR><A
HREF="elopprim.html">Last ></A></FONT>
</TD>
</TR>
<TR>
<TD COLSPAN=3 ALIGN=LEFT VALIGN=TOP><FONT SIZE=-2
FACE=sans-serif>
<A HREF="../mm.html">Mirrors</A> >
<A HREF="../index.html">Home</A> >
NFE Home >
<A HREF="mmtheorems.html">Th. List</A>
</FONT>
</TD>
</TR>
</TABLE>
<HR NOSHADE SIZE=1>
<CENTER>
<B><FONT COLOR="#006633">Created by <NAME></FONT></B>
</CENTER>
<HR NOSHADE SIZE=1>
<B><FONT COLOR="#006633">New Foundations Proof Explorer</FONT></B>
<TABLE>
<TR><TD ROWSPAN=2>
<I>New Foundations</I>
(<A HREF="http://en.wikipedia.org/wiki/New_Foundations">Wikipedia</A>
[external], <A HREF="http://plato.stanford.edu/entries/quine-nf/">
Stanford Encyclopedia of Philosophy</A> [external]) is an alternative set
theory to the Zermelo-Fraenkel set theory presented in the regular Metamath
Proof Explorer.
Unlike the Zermelo-Fraenkel system with the Axiom of Choice
(known as ZFC), New Foundations is a direct derivative of
the set theory originally presented in <I>Principia Mathematica</I>.
</TD></TR></TABLE>
<HR NOSHADE SIZE=1>
<TABLE WIDTH="100%"><TR>
<TD VALIGN=top>
<B><FONT COLOR="#006633">Contents of this page</FONT></B>
<MENU>
<LI> <A HREF="#strat">Stratification</A></LI>
<LI> <A HREF="#axioms">The axioms</A></LI>
<LI> <A HREF="#theorems">Some theorems</A></LI>
<LI> <A HREF="#bib">Bibliography</A></LI>
</MENU></TD>
<TD VALIGN=top>
<B><FONT COLOR="#006633">Related pages</FONT></B>
<MENU>
<LI> <A HREF="mmtheorems.html">Table of Contents and Theorem List</A></LI>
<LI> <A HREF="mmbiblio.html">Bibliographic Cross-Reference</A></LI>
<LI> <A HREF="mmdefinitions.html">Definition List</A></LI>
<LI> <A HREF="mmascii.html">ASCII Equivalents for Text-Only Browsers</A></LI>
<LI> <A HREF="../metamath/nf.mm">Metamath database nf.mm (ASCII file)</A></LI>
</MENU>
<B><FONT COLOR="#006633">External links</FONT></B>
<MENU>
<LI> <A HREF="https://github.com/sctfn/metamath-nf/">GitHub repository</A></LI>
</MENU>
</TD>
</TR></TABLE>
<HR NOSHADE SIZE=1><A NAME="strat"></A><B><FONT COLOR="#006633">
Stratification</FONT></B>
<p>
In <I>Principia Mathematica</I>, Russell and Whitehead used a typing system
to avoid the paradoxes of naive set theory,
rather than restrict the size of sets (as Zermelo-Fraenkel theory does).
This typing system was
eventually refined by Russell down to Typed Set Theory (TST).
In TST, unlimited comprehension is allowed
(approximately,
A ` e. ` ` _V ` is a theorem).
TST avoids the standard paradoxes by being a multi-sorted system.
That is, there are variables of type 0, 1, 2,... The WFFs are
restricted so that you must say
x[n] = y[n] and x[n] ` e. ` y[n+1], where n
is a variable type.
This means, among other things, that
x ` e. ` x is not a well-formed formula,
so we can't even sensibly speak of the Russell class.
Thus TST counters <A HREF="ru.html">Russell's Paradox</A>.
<p>
Now, consider introducing virtual classes into this theory. You need
to say things like V[n] = { x[n] | x[n] = x[n] } for each type n.
This leads to a "hall of mirrors" type situation: each named object is
duplicated for each type.
<p>
Quine noticed this and proposed collapsing
the whole theory into a one-sorted set theory, where the comprehension
axiom is restricted to formulas where you could theoretically introduce
subscripts to make the formula a WFF of TST.
Quine described this approach in a paper titled
"New Foundations for Mathematical Logic,"
so this approach is now called "New Foundations" (NF)
<A HREF="#Quine2">[Quine2]</A>.
For more details, see the
<A HREF="http://en.wikipedia.org/wiki/New_Foundations">Wikipedia
article on NF</A>.
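<p>
As a brief worked example of stratification: the formula
x ` e. ` y ` /\ ` y ` e. ` z is stratified, since assigning
x the number 0, y the number 1, and z the number 2 gives every
membership formula a left variable exactly one less than its right
variable. By contrast, x ` e. ` x is not stratified, since it would
require a number n with n + 1 = n. So although x ` e. ` x is
well formed in NF's one-sorted language, comprehension over it is
blocked, which is how NF avoids the Russell class.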
<HR NOSHADE SIZE=1><A NAME="axioms"></A><B><FONT COLOR="#006633">
The axioms</FONT></B>
<p>
The axioms begin with traditional axioms for
classical first order logic with equality.
See the regular
<a href="/mpeuni/mmset.html">Metamath Proof Explorer</a>
for discussions about these
axioms and some of their implications.
<p>
The key axioms specific to NF are
<a href="ax-ext.html">extensionality</a>
(two sets are identical if they contain the same elements) and
a comprehension schema.
Extensionality is formally defined as:
<CENTER><TABLE BORDER CELLSPACING=0 BGCOLOR="#EEFFFA"
SUMMARY="Extensionality">
<CAPTION><B>Extensionality</B></CAPTION>
<TR><TH>Name</TH><TH>Ref</TH><TH>Expression</TH></TR>
<TR ALIGN=LEFT>
<TD><A HREF="ax-ext.html">Axiom of Extensionality</A></TD>
<TD><FONT COLOR="#006633"> ~ ax-ext </FONT></TD>
<TD>` |- ( A. z ( z e. x <-> z e. y ) -> x = y ) ` </TD></TR>
</TABLE>
</CENTER>
<p>
The comprehension schema is stated using the concept of stratified formula;
the approach is the Stratification Axiom from [Quine2].
In short,
a well-formed formula using only propositional symbols, predicate
symbols, and ` e. ` is "stratified" iff you can make a (metalogical)
mapping from the variables to the natural numbers such that any formulas
of the form x = y have the same number,
and any formulas of the form
x ` e. ` y have
x as one less than y.
Quine's stratification axiom states that there is a
set corresponding to any stratified formula.
We use Hailperin's axioms and prove existence of stratified sets using
Hailperin's algorithm.
Thus the stratification axiom of [Quine2] is
implemented in this formalization using the axioms P1 through P9 from [Hailperin] and
the <a href="ax-sn.html">Axiom of Singleton ax-sn</a>:
<CENTER>
<TABLE BORDER CELLSPACING=0 BGCOLOR="#EEFFFA"
SUMMARY="List of New Foundations Stratification Axioms">
<CAPTION><B>New Foundations Stratification Axioms</B></CAPTION><TR
ALIGN=LEFT><TD><B>Name</B></TD><TD><B>
Ref</B></TD><TD><B>Expression (see link for any distinct variable requirements)
</B></TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-nin.html">Axiom of Anti-Intersection (P1)</A></TD>
<TD> ~ ax-nin </TD>
<TD>` |- E. z A. w ( w e. z <-> ( w e. x -/\ w e. y ) ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-si.html">Axiom of Singleton Image (P2)</A></TD>
<TD> ~ ax-si </TD>
<TD>` |- E. y A. z A. w ( << { z } , { w } >> e. y <-> << z , w >> e. x ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-sn.html">Axiom of Singleton (not directly stated in Hailperin)</A></TD>
<TD> ~ ax-sn </TD>
<TD>` |- E. y A. z ( z e. y <-> z = x ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-ins2.html">Axiom of Insertion Two (P3)</A></TD>
<TD> ~ ax-ins2 </TD>
<TD>` |- E. y A. z A. w A. t ( << { { z } } , << w , t >> >> e. y <-> << z , t >> `
` e. x ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-ins3.html">Axiom of Insertion Three (P4)</A></TD>
<TD> ~ ax-ins3 </TD>
<TD>` |- E. y A. z A. w A. t ( << { { z } } , << w , t >> >> e. y <-> << z , w >> `
` e. x ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-xp.html">Axiom of Cross Product (P5)</A></TD>
<TD> ~ ax-xp </TD><TD>` |- E. y A. z ( z e. y <-> E. w E. t ( z = << w , t >> /\ t e. x ) ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-typlower.html">Axiom of Type Lowering (P6)</A></TD>
<TD> ~ ax-typlower </TD>
<TD>` |- E. y A. z ( z e. y <-> A. w << w , { z } >> e. x ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-cnv.html">Axiom of Converse (P7)</A></TD>
<TD> ~ ax-cnv </TD>
<TD>` |- E. y A. z A. w ( << z , w >> e. y <-> << w , z >> e. x ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-1c.html">Axiom of Cardinal One (P8)</A></TD>
<TD> ~ ax-1c </TD>
<TD>` |- E. x A. y ( y e. x <-> E. z A. w ( w e. y <-> w = z ) ) ` </TD></TR>
<TR BGCOLOR="#EEFFFA" ALIGN=LEFT>
<TD><A HREF="ax-sset.html">Axiom of Subset Relationship (P9)</A></TD>
<TD> ~ ax-sset </TD>
<TD>` |- E. x A. y A. z ( << y , z >> e. x <-> A. w ( w e. y -> w e. z ) ) ` </TD></TR>
</TABLE>
</CENTER>
<p>
The usual definition of the ordered pair, first proposed by Kuratowski
in 1921 and used in the regular Metamath Proof Explorer,
has a serious drawback for NF and related theories that use stratification.
The Kuratowski ordered pair
is defined as << x , y >> = { { x } , { x , y } }.
This leads to the ordered pair having a type two greater than its arguments.
For example, z in << << x , y >> , z >>
would have a different type than x and y,
which makes multi-argument functions extremely awkward to work with.
Operations such as "1st" and complex "+" would not form sets
in NF with the Kuratowski ordered pairs.
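<p>
To see why the Kuratowski pair raises types by two: if x and y are
both assigned type n in a stratification, then { x } and
{ x , y } must be assigned type n + 1, so
{ { x } , { x , y } } must be assigned type n + 2. The pair thus
sits two types above its components, which is why relations and
operations built from Kuratowski pairs fail to be sets in NF.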
<p>
In contrast, the Quine definition of ordered pairs,
defined in definition <a href="df-op.html">df-op</a>, is type level.
That is, <. x , y >. has the same type as x and y,
which means that the same holds for <. <. x , y >. , z >.
This means that "1st" is a set with the Quine definition,
as is complex "+".
The Kuratowski ordered pair is defined
(as <a href="df-opk.html">df-opk</a>), because
it is a simple definition that can be used by the set construction axioms
that follow it, but for typical uses the Quine definition of ordered pairs
<a href="df-op.html">df-op</a> is used instead.
<p>
Perhaps one of the most remarkable aspects of NF is that the
<A HREF="nchoice.html">Axiom of Choice (an axiom of the widely-used ZFC system)
can be disproven in NF</A>, a result
proven in [Specker].
As a corollary, NF proves the Axiom of Infinity: the Axiom of Choice
provably holds for finite sets, so its disproof shows that the universe
cannot be finite.
<p>
There are several systems <i>related</i> to NF.
In particular, NFU is a small modification of NF that also allows
urelements (multiple distinct objects lacking members).
NFU corresponds to a modified type theory TSTU,
where type 0 has urelements, not just a single empty set.
NFU is consistent with both Infinity and Choice, so both can be added to NFU.
NFU + Infinity + Choice has the same consistency strength
as the theory of types with the Axiom of Infinity.
NFU + Infinity + Choice has been extended further, e.g., with various strong
axioms of infinity (similar to ways that ZFC has been extended).
Randall Holmes states that <a href="http://math.boisestate.edu/~~holmes/holmes/nf.html#Consistent">"Extensions of NFU
are adequate vehicles for mathematics in a basically familiar style"</a>.
NFU is not further discussed here.
<p>
A fair amount of the definitions and
theorems (notably the ones on boolean set operations)
are taken verbatim from the regular Metamath Proof Explorer
source file set.mm (based on ZFC).
This also follows the development in [Rosser] fairly closely.
An unusual aspect is the
<a href="df-tcfn.html">stratified T-raising function TcFn</a>.
The work to specifically formalize New Foundations in metamath
was originally created by <NAME>.
Those who are interested in New Foundations may want to look at the
<a href="http://math.boisestate.edu/~~holmes/holmes/nf.html">New Foundations
home page</a>, as well as a
<a href="http://math.boisestate.edu/~~holmes/">proof of the consistency of
New Foundations by Randall Holmes</a>.
The descriptions given here are based on a
<a href="https://groups.google.com/forum/#!topic/metamath/yj1j4Ebb1bI">discussion on the metamath mailing list</a>.
<HR NOSHADE SIZE=1><A NAME="theorems"></A><B><FONT COLOR="#006633">
Some theorems</FONT></B>
<MENU>
<LI> <A HREF="1p1e2c.html">Proof of <I>Principia
Mathematica</I>'s version of 1+1=2</A></LI>
<LI> <A HREF="vinf.html">Proof of the Axiom of Infinity</A></LI>
<LI> <A HREF="ru.html">Russell's Paradox</A></LI>
<LI> <A HREF="vvex.html">Proof that the universal class exists</A></LI>
<LI> <A HREF="nchoice.html">Disproof of the Axiom of Choice</A></LI>
</MENU>
<HR NOSHADE SIZE=1><A NAME="bib"></A><B><FONT
COLOR="#006633">Bibliography</FONT></B>
<OL>
<LI><A NAME="BellMachover"></A> [BellMachover] <NAME>., and <NAME>, <I>A Course in Mathematical Logic,</I> North-Holland,
Amsterdam (1977) [QA9.B3953].</LI>
<LI><A NAME="ChoquetDD"></A> [ChoquetDD] Choquet-Bruhat, Yvonne and <NAME>, with <NAME>, <I>Analysis, Manifolds and
Physics,</I> Elsevier Science B.V., Amsterdam (1982) [QC20.7.A5C48
1981].</LI>
<LI><A NAME="Eisenberg"></A> [Eisenberg] <NAME>, <I>Axiomatic Theory of
Sets and Classes,</I> <NAME> and Winston, Inc., New York (1971)
[QA248.E36].</LI>
<LI><A NAME="Enderton"></A> [Enderton] Enderton, <NAME>., <I>Elements of Set
Theory,</I> Academic Press, Inc., San Diego, California (1977)
[QA248.E5].</LI>
<LI><A NAME="Gleason"></A> [Gleason] Gleason, <NAME>., <I>Fundamentals of
Abstract Analysis,</I> Jones and Bartlett Publishers, Boston (1991)
[QA300.G554].</LI>
<LI><A NAME="Hailperin"></A> [Hailperin] Hailperin, Theodore, "A Set of
Axioms for Logic," <I>Journal of Symbolic Logic,</I> 9:1-14 (1944) [BC1.J6].</LI>
<LI><A NAME="Hamilton"></A> [Hamilton] <NAME>., <I>Logic for
Mathematicians,</I> Cambridge University Press, Cambridge, revised
edition (1988) [QA9.H298 1988].</LI>
<LI><A NAME="Hitchcock"></A> [Hitchcock] <NAME>, <I>The
peculiarities of Stoic propositional logic</I>, McMaster University;
available at <A
HREF="http://www.humanities.mcmaster.ca/~~hitchckd/peculiarities.pdf">
http://www.humanities.mcmaster.ca/~~hitchckd/peculiarities.pdf</A>
(retrieved 3 Jul 2016).</LI>
<LI><A NAME="Holmes"></A> [Holmes] <NAME>, <I>Elementary Set Theory With a Universal Set,</I> Web. Accessed 23 Feb 2015. <A HREF="http://math.boisestate.edu/~~holmes/holmes/head.pdf">Link</A></LI>
<LI><A NAME="Jech"></A> [Jech] <NAME>, <I>Set Theory,</I>
Academic Press, San Diego (1978) [QA248.J42].</LI>
<LI><A NAME="KalishMontague"></A> [KalishMontague] <NAME>. and R.
Montague, "On Tarski's formalization of predicate logic with
identity," <I>Archiv für Mathematische Logik und
Grundlagenforschung,</I> 7:81-101 (1965) [QA.A673].</LI>
<LI><A NAME="Kunen"></A> [Kunen] Kunen, Kenneth, <I>Set Theory: An
Introduction to Independence Proofs,</I> Elsevier Science B.V.,
Amsterdam (1980) [QA248.K75].</LI>
<LI><A NAME="KuratowskiMostowski"></A> [KuratowskiMostowski] Kuratowski, K.
and <NAME>, <I>Set Theory: with an Introduction to
Descriptive Set Theory,</I> 2nd ed., North-Holland,
Amsterdam (1976) [QA248.K7683 1976].</LI>
<LI><A NAME="Levy"></A> [Levy] Levy, Azriel, <I>Basic Set Theory</I>,
Dover Publications, <NAME>. (2002) [QA248.L398 2002]. </LI>
<LI><A NAME="Lopez-Astorga"></A> [Lopez-Astorga] <NAME>,
"The First Rule of Stoic Logic and its Relationship with the
Indemonstrables", <I>Revista de Filosofía Tópicos</I> (2016);
available at <A HREF="http://www.scielo.org.mx/pdf/trf/n50/n50a1.pdf">
http://www.scielo.org.mx/pdf/trf/n50/n50a1.pdf</A> (retrieved 3 Jul
2016).</LI>
<LI><A NAME="Margaris"></A> [Margaris] <NAME>, <I>First Order
Mathematical Logic,</I> Blaisdell Publishing Company, Waltham,
Massachusetts (1967) [QA9.M327].</LI>
<LI><A NAME="Megill"></A><A NAME="bibmegill"></A> [Megill] <NAME>.,
"A Finitely Axiomatized Formalization of Predicate Calculus with
Equality," <I>Notre Dame Journal of Formal Logic,</I> 36:435-453
(1995) [QA.N914]; available at <A
HREF="http://projecteuclid.org/euclid.ndjfl/1040149359"
>http://projecteuclid.org/euclid.ndjfl/1040149359</A> (accessed
11 Nov 2014); the <A HREF="../downloads/finiteaxiom.pdf">PDF
preprint</A> has the same content (with corrections) but pages are
numbered 1-22, and the database references use the numbers printed on the
page itself, not the PDF page numbers.</LI>
<LI><A NAME="Mendelson"></A> [Mendelson] <NAME>, <I>Introduction to
Mathematical Logic,</I> 2nd ed., <NAME> (1979) [QA9.M537].</LI>
<LI><A NAME="Monk1"></A> [Monk1] <NAME>, <I>Introduction to Set
Theory,</I> McGraw-Hill, Inc. (1969) [QA248.M745].</LI>
<LI><A NAME="Monk2"></A> [Monk2] <NAME>, "Substitutionless
Predicate Logic with Identity," <I>Archiv für Mathematische Logik
und Grundlagenforschung,</I> 7:103-121 (1965) [QA.A673].</LI>
<LI><A NAME="Pfenning"></A> [Pfenning] <NAME>,
<I>Automated Theorem Proving,</I> Carnegie-Mellon University (April 13, 2004);
available at <A
HREF="http://www.cs.cmu.edu/~~fp/courses/atp/handouts/atp.pdf">http://www.cs.cmu.edu/~~fp/courses/atp/handouts/atp.pdf</A>
and <A
HREF="http://web.archive.org/web/20160304013704/http://www.cs.cmu.edu/~~fp/courses/atp/handouts/atp.pdf">http://web.archive.org/web/20160304013704/http://www.cs.cmu.edu/~~fp/courses/atp/handouts/atp.pdf</A>
(retrieved 7 Feb 2017).</LI>
<LI><A NAME="Quine"></A> [Quine] Quine, <NAME>, <I>Set Theory
and Its Logic,</I> Harvard University Press, Cambridge, Massachusetts,
revised edition (1969) [QA248.Q7 1969].</LI>
<LI><A NAME="Quine2"></A> [Quine2] <NAME>., 1937a, "New Foundations
for Mathematical Logic," <I>American Mathematical Monthly,</I>
44: 70-80 (1937) [QA1.A515].</LI>
<LI><A NAME="Rosser"></A> [Rosser] Rosser, <NAME>., <I>Logic for Mathematicians,</I> Dover Publications, Mineola, N.Y. (2008) [BC135.R58 2008].</LI>
<LI><A NAME="Sanford"></A> [Sanford] Sanford, <NAME>., <I>If P, then Q:
Conditionals and the Foundations of Reasoning</I>, 2nd ed., Routledge
Taylor & Francis Group (2003); ISBN 0-415-28369-8; available at <A
HREF="https://books.google.com/books?id=h_AUynB6PA8C&pg=PA39#v=onepage&q&f=false">https://books.google.com/books?id=h_AUynB6PA8C&pg=PA39#v=onepage&q&f=false</A>
(retrieved 3 Jul 2016).</LI>
<LI><A NAME="Schechter"></A> [Schechter] <NAME>, <I>Handbook of
Analysis and Its Foundations</I>, Academic Press, San Diego (1997)
[QA300.S339].</LI>
<LI><A NAME="Specker"></A> [Specker] <NAME>.,
"The axiom of choice in Quine's new foundations for mathematical
logic," <I>Proceedings of the National Academy of Sciences of the
USA,</I> 39:972-975 (1953) [Q11.N26]</LI>
<LI><A NAME="Stoll"></A> [Stoll] Stoll, <NAME>., <I>Set Theory and Logic,</I>
Dover Publications, Inc. (1979) [QA248.S7985 1979].</LI>
<LI><A NAME="Suppes"></A> [Suppes] Suppes, Patrick, <I>Axiomatic Set Theory,</I>
Dover Publications, Inc. (1972) [QA248.S959].</LI>
<LI><A NAME="TakeutiZaring"></A> [TakeutiZaring] <NAME>, and
<NAME>, <I>Introduction to Axiomatic Set Theory,</I>
Springer-Verlag, New York, second edition (1982) [QA248.T136 1982].</LI>
<LI><A NAME="Tarski"></A> [Tarski] Tarski, Alfred, "A Simplified
Formalization of Predicate Logic with Identity," <I>Archiv für
Mathematische Logik und Grundlagenforschung,</I> 7:61-79 (1965)
[QA.A673].</LI>
<LI><A NAME="WhiteheadRussell"></A> [WhiteheadRussell] Whitehead, <NAME>, and <NAME>, <I>Principia Mathematica to *56,</I>
Cambridge University Press, Cambridge, 1962 [QA9.W592 1962].</LI>
</OL>
<HR NOSHADE SIZE=1>
<TABLE BORDER=0 WIDTH="100%"><TR>
<TD ALIGN=LEFT VALIGN=TOP WIDTH="25%"><FONT SIZE=-2 FACE=sans-serif>
</FONT></TD>
<TD NOWRAP ALIGN=CENTER><I><FONT SIZE=-1>This
page was last updated on 28-May-2018.</FONT></I><BR><FONT
FACE="ARIAL" SIZE=-2>Your
comments are welcome:
<NAME> <A HREF="../email.html"><IMG BORDER=0
SRC="_nmemail.gif"
ALT="nm at alum dot mit dot edu"
TITLE="nm at alum dot mit dot edu"
WIDTH=90 HEIGHT=13 STYLE="margin-bottom:-3px"></A><BR>
Copyright terms:
<A HREF="../copyright.html#pd">Public domain</A></FONT></TD>
<TD ALIGN=RIGHT VALIGN=BOTTOM WIDTH="25%">
<FONT FACE="ARIAL" SIZE=-2>
<A
HREF="http://validator.w3.org/check?uri=referer">W3C HTML validation</A>
[external]
</FONT>
</TD>
</TR></TABLE>
</BODY>
</HTML>
| TjWhale/set.mm |
<|start_filename|>include/drivers/st/stm32_rng.h<|end_filename|>
/*
* Copyright (c) 2018, STMicroelectronics - All Rights Reserved
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef STM32_RNG_H
#define STM32_RNG_H
#include <stdint.h>
int stm32_rng_read(uint8_t *out, uint32_t size);
int stm32_rng_init(void);
#endif /* STM32_RNG_H */
| engicam-stable/tfa-source-engicam |
<|start_filename|>src/main/java/com/tremolosecurity/unison/jms/ibm/IbmMqConnectionFactory.java<|end_filename|>
/*******************************************************************************
* Copyright 2019 Tremolo Security, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.tremolosecurity.unison.jms.ibm;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSContext;
import javax.jms.JMSException;
import com.ibm.msg.client.jms.JmsConnectionFactory;
import com.ibm.msg.client.jms.JmsFactoryFactory;
import com.ibm.msg.client.wmq.WMQConstants;
/**
* IbmMqConnectionFactory
*/
public class IbmMqConnectionFactory implements ConnectionFactory {
JmsConnectionFactory mqConnectionFactory;
public IbmMqConnectionFactory() throws JMSException {
JmsFactoryFactory ff = JmsFactoryFactory.getInstance(WMQConstants.WMQ_PROVIDER);
mqConnectionFactory = ff.createConnectionFactory();
this.mqConnectionFactory.setIntProperty(WMQConstants.WMQ_CONNECTION_MODE, WMQConstants.WMQ_CM_CLIENT);
this.mqConnectionFactory.setBooleanProperty(WMQConstants.USER_AUTHENTICATION_MQCSP, true);
}
@Override
public Connection createConnection() throws JMSException {
return mqConnectionFactory.createConnection();
}
@Override
public Connection createConnection(String userName, String password) throws JMSException {
return mqConnectionFactory.createConnection(userName, password);
}
@Override
public JMSContext createContext() {
return mqConnectionFactory.createContext();
}
@Override
public JMSContext createContext(String userName, String password) {
return mqConnectionFactory.createContext(userName, password);
}
@Override
public JMSContext createContext(String userName, String password, int sessionMode) {
return mqConnectionFactory.createContext(userName, password, sessionMode);
}
@Override
public JMSContext createContext(int sessionMode) {
return mqConnectionFactory.createContext(sessionMode);
}
public void setHost(String host) throws JMSException {
this.mqConnectionFactory.setStringProperty(WMQConstants.WMQ_HOST_NAME, host);
}
public String getHost() throws JMSException {
return this.mqConnectionFactory.getStringProperty(WMQConstants.WMQ_HOST_NAME);
}
public void setPort(String port) throws NumberFormatException, JMSException {
this.mqConnectionFactory.setIntProperty(WMQConstants.WMQ_PORT, Integer.parseInt(port));
}
public String getPort() throws JMSException {
return Integer.toString(this.mqConnectionFactory.getIntProperty(WMQConstants.WMQ_PORT));
}
public void setQueueManager(String qmgr) throws JMSException {
this.mqConnectionFactory.setStringProperty(WMQConstants.WMQ_QUEUE_MANAGER, qmgr);
}
public String getQueueManager() throws JMSException {
return this.mqConnectionFactory.getStringProperty(WMQConstants.WMQ_QUEUE_MANAGER);
}
public void setApplicationName(String appName) throws JMSException {
this.mqConnectionFactory.setStringProperty(WMQConstants.WMQ_APPLICATIONNAME, appName);
}
public String getApplicationName() throws JMSException {
return this.mqConnectionFactory.getStringProperty(WMQConstants.WMQ_APPLICATIONNAME);
}
public void setChannel(String channel) throws JMSException {
this.mqConnectionFactory.setStringProperty(WMQConstants.WMQ_CHANNEL, channel);
}
public String getChannel() throws JMSException {
return this.mqConnectionFactory.getStringProperty(WMQConstants.WMQ_CHANNEL);
}
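// Illustrative usage (a sketch; the host, port, queue manager, channel,
// and credentials below are placeholders, not defaults of this class):
//
//   IbmMqConnectionFactory factory = new IbmMqConnectionFactory();
//   factory.setHost("mq.example.com");
//   factory.setPort("1414");
//   factory.setQueueManager("QM1");
//   factory.setChannel("DEV.APP.SVRCONN");
//   try (JMSContext context = factory.createContext("user", "password")) {
//       // send or receive messages with the JMS 2.0 simplified API
//   }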
}
| TremoloSecurity/ibm-mq-connection-factory
<|start_filename|>Driver Kernel/driver.h<|end_filename|>
#pragma once
#include "../Defines/utils.h"
std::mutex isuse;
class Driver
{
public:
UINT ProcessId;
const bool Init(const BOOL PhysicalMode) {
this->bPhysicalMode = PhysicalMode;
this->hDriver = CreateFileA((("\\\\.\\PEAuth")), GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, 0, NULL);
if (this->hDriver != INVALID_HANDLE_VALUE) {
if (this->SharedBuffer = VirtualAlloc(0, sizeof(REQUEST_DATA), MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE)) {
UNICODE_STRING RegPath = RTL_CONSTANT_STRING(L"\\Registry\\Machine\\SOFTWARE\\ucflash");
if (!RegistryUtils::WriteRegistry(RegPath, RTL_CONSTANT_STRING(L"xxx"), &this->SharedBuffer, REG_QWORD, 8)) {
return false;
}
PVOID pid = (PVOID)GetCurrentProcessId();
if (!RegistryUtils::WriteRegistry(RegPath, RTL_CONSTANT_STRING(L"xx"), &pid, REG_QWORD, 8)) {
return false;
}
auto OLD_MAGGICCODE = this->MAGGICCODE;
SendRequest(99, 0);
if (this->MAGGICCODE == OLD_MAGGICCODE)
this->MAGGICCODE = (ULONG64)RegistryUtils::ReadRegistry<LONG64>(RegPath, RTL_CONSTANT_STRING(L"xxxx"));
return true;
}
}
return false;
}
const NTSTATUS SendRequest(const UINT type, const PVOID args) {
std::scoped_lock lock(isuse);
REQUEST_DATA req;
NTSTATUS status = (NTSTATUS)0xC0000001; // STATUS_UNSUCCESSFUL unless the driver overwrites it
req.MaggicCode = &this->MAGGICCODE;
req.Type = type;
req.Arguments = args;
req.Status = &status;
memcpy(this->SharedBuffer, &req, sizeof(REQUEST_DATA));
FlushFileBuffers(this->hDriver);
return status;
}
NTSTATUS ReadProcessMemory(uint64_t src, void* dest, uint32_t size) {
REQUEST_READ req;
req.ProcessId = ProcessId;
req.Src = src;
req.Dest = dest;
req.Size = size;
req.bPhysicalMem = bPhysicalMode;
return SendRequest(REQUEST_TYPE::READ, &req);
}
NTSTATUS WriteProcessMemory(PVOID src, PVOID dest, DWORD size) {
REQUEST_WRITE req;
req.ProcessId = ProcessId;
req.Src = src;
req.Dest = dest;
req.Size = size;
req.bPhysicalMem = bPhysicalMode;
return SendRequest(REQUEST_TYPE::WRITE, &req);
}
const UINT GetProcessThreadNumByID(DWORD dwPID)
{
HANDLE hProcessSnap = ::CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
if (hProcessSnap == INVALID_HANDLE_VALUE)
return 0;
PROCESSENTRY32 pe32 = { 0 };
pe32.dwSize = sizeof(pe32);
BOOL bRet = ::Process32First(hProcessSnap, &pe32);
while (bRet)
{
if (pe32.th32ProcessID == dwPID)
{
::CloseHandle(hProcessSnap);
return pe32.cntThreads;
}
bRet = ::Process32Next(hProcessSnap, &pe32);
}
return 0;
}
const UINT GetProcessId(const wchar_t* process_name) {
UINT pid = 0;
DWORD dwThreadCountMax = 0;
// Create toolhelp snapshot.
HANDLE snapshot = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
PROCESSENTRY32 process;
ZeroMemory(&process, sizeof(process));
process.dwSize = sizeof(process);
// Walkthrough all processes.
if (Process32First(snapshot, &process))
{
do
{
if (wcsstr(process.szExeFile, process_name))
{
DWORD dwTmpThreadCount = GetProcessThreadNumByID(process.th32ProcessID);
if (dwTmpThreadCount > dwThreadCountMax)
{
dwThreadCountMax = dwTmpThreadCount;
pid = process.th32ProcessID;
break;
}
}
} while (Process32Next(snapshot, &process));
}
CloseHandle(snapshot);
return pid;
}
const bool Attach(const wchar_t* Processname, const wchar_t* Classname = 0) {
if (Classname) {
while (!FindWindowW(Classname, 0)) { Sleep(50); }
}
if (this->ProcessId = this->GetProcessId(Processname))
return true;
return false;
}
const uint64_t GetModuleBase(const wchar_t* ModuleName = 0) {
if (bPhysicalMode) {
REQUEST_MAINBASE req;
uint64_t base = NULL;
req.ProcessId = ProcessId;
req.OutAddress = (PBYTE*)&base;
SendRequest(REQUEST_TYPE::MAINBASE, &req);
return { base };
}
else {
if (!ModuleName)
return { 0 };
REQUEST_MODULE req;
uint64_t base = NULL;
DWORD size = NULL;
req.ProcessId = ProcessId;
req.OutAddress = (PBYTE*)&base;
req.OutSize = &size;
wcscpy_s(req.Module, sizeof(req.Module) / sizeof(req.Module[0]), ModuleName);
SendRequest(REQUEST_TYPE::MODULE, &req);
return { base };
}
}
private:
PVOID SharedBuffer;
HANDLE hDriver;
ULONG64 MAGGICCODE = 0x59002360218c1e2dul;
BOOL bPhysicalMode = FALSE;
typedef enum _REQUEST_TYPE : UINT {
WRITE,
READ,
PROTECT,
ALLOC,
FREE,
MODULE,
MAINBASE,
THREADCALL,
} REQUEST_TYPE;
typedef struct _REQUEST_DATA {
ULONG64* MaggicCode;
UINT Type;
PVOID Arguments;
NTSTATUS* Status;
} REQUEST_DATA, * PREQUEST_DATA;
typedef struct _REQUEST_WRITE {
DWORD ProcessId;
PVOID Dest;
PVOID Src;
DWORD Size;
BOOL bPhysicalMem;
} REQUEST_WRITE, * PREQUEST_WRITE;
typedef struct _REQUEST_READ {
DWORD ProcessId;
void* Dest;
uint64_t Src;
uint32_t Size;
BOOL bPhysicalMem;
} REQUEST_READ, * PREQUEST_READ;
typedef struct _REQUEST_PROTECT {
DWORD ProcessId;
PVOID Address;
DWORD Size;
PDWORD InOutProtect;
} REQUEST_PROTECT, * PREQUEST_PROTECT;
typedef struct _REQUEST_ALLOC {
DWORD ProcessId;
PVOID OutAddress;
DWORD Size;
DWORD Protect;
} REQUEST_ALLOC, * PREQUEST_ALLOC;
typedef struct _REQUEST_FREE {
DWORD ProcessId;
PVOID Address;
} REQUEST_FREE, * PREQUEST_FREE;
typedef struct _REQUEST_MODULE {
DWORD ProcessId;
WCHAR Module[0xFF];
PBYTE* OutAddress;
DWORD* OutSize;
} REQUEST_MODULE, * PREQUEST_MODULE;
typedef struct _REQUEST_MAINBASE {
DWORD ProcessId;
PBYTE* OutAddress;
} REQUEST_MAINBASE, * PREQUEST_MAINBASE;
};
static Driver* driver = new Driver;
template <typename T>
T read(const uintptr_t address)
{
T buffer{ };
driver->ReadProcessMemory(address, &buffer, sizeof(T));
return buffer;
}
template <typename T>
T write(const uintptr_t address, T buffer)
{
driver->WriteProcessMemory((PVOID)&buffer, (PVOID)address, sizeof(T));
return buffer;
}
std::string readwtf(uintptr_t Address, void* Buffer, SIZE_T Size)
{
driver->ReadProcessMemory(Address, Buffer, Size);
char name[256] = { 0 };
// Clamp the copy so an oversized read cannot overflow the local buffer,
// and leave the final byte as a null terminator.
memcpy(&name, Buffer, Size < sizeof(name) - 1 ? Size : sizeof(name) - 1);
return std::string(name);
}
uint64_t ReadChain(uint64_t base, const std::vector<uint64_t>& offsets) {
uint64_t result = read<uint64_t>(base + offsets.at(0));
for (int i = 1; i < offsets.size(); i++) {
result = read<uint64_t>(result + offsets.at(i));
}
return result;
}
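// Illustrative use of ReadChain (a sketch; the offsets are placeholders, not
// real game offsets): with offsets { 0x10, 0x20, 0x8 } the call
//   uint64_t value = ReadChain(base, { 0x10, 0x20, 0x8 });
// reads *(*(*(base + 0x10) + 0x20) + 0x8) through the driver, i.e. it walks
// a multi-level pointer chain in one call.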
<|start_filename|>configuration/define.h<|end_filename|>
#define E
ImFont* m_pFont;
DWORD_PTR Uworld;
DWORD_PTR LocalPawn;
DWORD_PTR PlayerState;
DWORD_PTR Localplayer;
DWORD_PTR Rootcomp;
DWORD_PTR relativelocation;
DWORD_PTR PlayerController;
DWORD_PTR Persistentlevel;
DWORD_PTR Ulevel;
Vector3 localactorpos;
Vector3 Localcam;
uint64_t TargetPawn;
int localplayerID;
bool isaimbotting;
bool CrosshairSnapLines = false;
bool team_CrosshairSnapLines;
RECT GameRect = { NULL };
D3DPRESENT_PARAMETERS d3dpp;
DWORD ScreenCenterX;
DWORD ScreenCenterY;
DWORD ScreenCenterZ;
//static void xCreateWindow();
//static void xInitD3d();
//static void xMainLoop();
//static LRESULT CALLBACK WinProc(HWND hWnd, UINT Message, WPARAM wParam, LPARAM lParam);
//extern LRESULT ImGui_ImplWin32_WndProcHandler(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam);
static HWND Window = NULL;
//IDirect3D9Ex* p_Object = NULL;
static LPDIRECT3DDEVICE9 D3dDevice = NULL;
static LPDIRECT3DVERTEXBUFFER9 TriBuf = NULL;
DWORD Menuthread(LPVOID in) {
while (1) {
if (GetAsyncKeyState(VK_INSERT) & 1) {
item.show_menu = !item.show_menu;
}
Sleep(2);
}
}
static HWND get_process_wnd(uint32_t pid) {
std::pair<HWND, uint32_t> params = { 0, pid };
BOOL bResult = EnumWindows([](HWND hwnd, LPARAM lParam) -> BOOL {
auto pParams = (std::pair<HWND, uint32_t>*)(lParam);
uint32_t processId = 0;
if (GetWindowThreadProcessId(hwnd, reinterpret_cast<LPDWORD>(&processId)) && processId == pParams->second) {
SetLastError((uint32_t)-1);
pParams->first = hwnd;
return FALSE;
}
return TRUE;
}, (LPARAM)&params);
if (!bResult && GetLastError() == -1 && params.first)
return params.first;
return NULL;
}
ImFont* title;
ImFont* title1;
ImFont* otherfont;
ImFont* tabfont;
ImFont* othertitle;
ImFont* spritefont;
<|start_filename|>Win32/win_utils.h<|end_filename|>
#pragma once
#include <windows.h>
#include <winternl.h>
#include <process.h>
#include <tlhelp32.h>
#include <inttypes.h>
#include <iostream>
#include <stdexcept>
#include <vector>
#include <algorithm>
#include <chrono>
#include <sstream>
#include <d3d9.h>
#pragma comment (lib, "d3d9.lib")
#include "../Imgui/imgui.h"
#include "../Imgui/imgui_impl_dx9.h"
#include "../Imgui/imgui_impl_win32.h"
#include "../Defines\defs.h"
HANDLE DriverHandle;
uint64_t base_address;
HWND hwnd = NULL;
DWORD processID;
static ULONG Width = GetSystemMetrics(SM_CXSCREEN);
static ULONG Height = GetSystemMetrics(SM_CYSCREEN);
int Depth;
#define M_PI 3.14159265358979323846264338327950288419716939937510
typedef struct info_t {
int pid = 0;
DWORD_PTR address;
void* value;
SIZE_T size;
void* data;
} info, * p_info;
class Vector3
{
public:
Vector3() : x(0.f), y(0.f), z(0.f)
{
}
Vector3(float _x, float _y, float _z) : x(_x), y(_y), z(_z)
{
}
~Vector3()
{
}
float x;
float y;
float z;
inline float Dot(Vector3 v)
{
return x * v.x + y * v.y + z * v.z;
}
inline float Distance(Vector3 v)
{
return float(sqrtf(powf(v.x - x, 2.0f) + powf(v.y - y, 2.0f) + powf(v.z - z, 2.0f)));
}
inline double Length() {
return sqrt(x * x + y * y + z * z);
}
//inline FRotator ToFRotator() {
//}
Vector3 operator+(Vector3 v)
{
return Vector3(x + v.x, y + v.y, z + v.z);
}
Vector3 operator-(Vector3 v)
{
return Vector3(x - v.x, y - v.y, z - v.z);
}
Vector3 operator*(float flNum) { return Vector3(x * flNum, y * flNum, z * flNum); }
};
struct FQuat
{
float x;
float y;
float z;
float w;
};
struct FTransform
{
FQuat rot;
Vector3 translation;
char pad[4];
Vector3 scale;
char pad1[4];
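// Builds a 4x4 row-major D3D matrix from this transform's quaternion
// rotation, per-axis scale, and translation (translation occupies the
// fourth row, matching the row-vector convention used below).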
D3DMATRIX ToMatrixWithScale()
{
D3DMATRIX m;
m._41 = translation.x;
m._42 = translation.y;
m._43 = translation.z;
float x2 = rot.x + rot.x;
float y2 = rot.y + rot.y;
float z2 = rot.z + rot.z;
float xx2 = rot.x * x2;
float yy2 = rot.y * y2;
float zz2 = rot.z * z2;
m._11 = (1.0f - (yy2 + zz2)) * scale.x;
m._22 = (1.0f - (xx2 + zz2)) * scale.y;
m._33 = (1.0f - (xx2 + yy2)) * scale.z;
float yz2 = rot.y * z2;
float wx2 = rot.w * x2;
m._32 = (yz2 - wx2) * scale.z;
m._23 = (yz2 + wx2) * scale.y;
float xy2 = rot.x * y2;
float wz2 = rot.w * z2;
m._21 = (xy2 - wz2) * scale.y;
m._12 = (xy2 + wz2) * scale.x;
float xz2 = rot.x * z2;
float wy2 = rot.w * y2;
m._31 = (xz2 + wy2) * scale.z;
m._13 = (xz2 - wy2) * scale.x;
m._14 = 0.0f;
m._24 = 0.0f;
m._34 = 0.0f;
m._44 = 1.0f;
return m;
}
};
D3DMATRIX MatrixMultiplication(D3DMATRIX pM1, D3DMATRIX pM2)
{
D3DMATRIX pOut;
pOut._11 = pM1._11 * pM2._11 + pM1._12 * pM2._21 + pM1._13 * pM2._31 + pM1._14 * pM2._41;
pOut._12 = pM1._11 * pM2._12 + pM1._12 * pM2._22 + pM1._13 * pM2._32 + pM1._14 * pM2._42;
pOut._13 = pM1._11 * pM2._13 + pM1._12 * pM2._23 + pM1._13 * pM2._33 + pM1._14 * pM2._43;
pOut._14 = pM1._11 * pM2._14 + pM1._12 * pM2._24 + pM1._13 * pM2._34 + pM1._14 * pM2._44;
pOut._21 = pM1._21 * pM2._11 + pM1._22 * pM2._21 + pM1._23 * pM2._31 + pM1._24 * pM2._41;
pOut._22 = pM1._21 * pM2._12 + pM1._22 * pM2._22 + pM1._23 * pM2._32 + pM1._24 * pM2._42;
pOut._23 = pM1._21 * pM2._13 + pM1._22 * pM2._23 + pM1._23 * pM2._33 + pM1._24 * pM2._43;
pOut._24 = pM1._21 * pM2._14 + pM1._22 * pM2._24 + pM1._23 * pM2._34 + pM1._24 * pM2._44;
pOut._31 = pM1._31 * pM2._11 + pM1._32 * pM2._21 + pM1._33 * pM2._31 + pM1._34 * pM2._41;
pOut._32 = pM1._31 * pM2._12 + pM1._32 * pM2._22 + pM1._33 * pM2._32 + pM1._34 * pM2._42;
pOut._33 = pM1._31 * pM2._13 + pM1._32 * pM2._23 + pM1._33 * pM2._33 + pM1._34 * pM2._43;
pOut._34 = pM1._31 * pM2._14 + pM1._32 * pM2._24 + pM1._33 * pM2._34 + pM1._34 * pM2._44;
pOut._41 = pM1._41 * pM2._11 + pM1._42 * pM2._21 + pM1._43 * pM2._31 + pM1._44 * pM2._41;
pOut._42 = pM1._41 * pM2._12 + pM1._42 * pM2._22 + pM1._43 * pM2._32 + pM1._44 * pM2._42;
pOut._43 = pM1._41 * pM2._13 + pM1._42 * pM2._23 + pM1._43 * pM2._33 + pM1._44 * pM2._43;
pOut._44 = pM1._41 * pM2._14 + pM1._42 * pM2._24 + pM1._43 * pM2._34 + pM1._44 * pM2._44;
return pOut;
}
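// Illustrative composition (a sketch; the component-to-world offset 0x1C0 is
// hypothetical and game-specific): a bone's world matrix is typically its
// local transform matrix multiplied by the mesh's component-to-world matrix.
//   FTransform bone = read<FTransform>(boneArray + index * sizeof(FTransform));
//   FTransform c2w = read<FTransform>(mesh + 0x1C0); // hypothetical offset
//   D3DMATRIX world = MatrixMultiplication(bone.ToMatrixWithScale(),
//                                          c2w.ToMatrixWithScale());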
<|start_filename|>Menu Look/old.h<|end_filename|>
//if (menu_key)
//{
// auto& style = ImGui::GetStyle();
// style.Colors[ImGuiCol_TitleBg] = ImColor(24, 25, 24, 255);
// style.Colors[ImGuiCol_Text] = ImColor(252, 186, 3);
// style.Colors[ImGuiCol_TitleBgActive] = ImColor(24, 25, 24, 255);
// style.Colors[ImGuiCol_TitleBgCollapsed] = ImColor(15, 15, 15, 50);
// style.Colors[ImGuiCol_Border] = ImColor(0, 0, 0, 0);
// style.Colors[ImGuiCol_WindowBg] = ImColor(12, 12, 12, 255);
// style.Colors[ImGuiCol_Button] = ImColor(24, 25, 24, 255);
// style.Colors[ImGuiCol_ButtonActive] = ImColor(44, 44, 44, 255);
// style.Colors[ImGuiCol_ButtonHovered] = ImColor(44, 44, 44, 255);
// style.Colors[ImGuiCol_CheckMark] = ImColor(252, 186, 3);
// style.Colors[ImGuiCol_FrameBg] = ImColor(36, 37, 36, 255);
// style.Colors[ImGuiCol_FrameBgActive] = ImColor(36, 37, 36, 255);
// style.Colors[ImGuiCol_FrameBgHovered] = ImColor(36, 37, 36, 255);
// style.Colors[ImGuiCol_Header] = ImColor(54, 56, 54, 255); // for collapsing headers , etc
// style.Colors[ImGuiCol_HeaderActive] = ImColor(54, 56, 54, 255);
// style.Colors[ImGuiCol_HeaderHovered] = ImColor(54, 56, 54, 255);
// style.Colors[ImGuiCol_ResizeGrip] = ImColor(51, 49, 50, 255); // the resize grip thing bottom right
// style.Colors[ImGuiCol_ResizeGripActive] = ImColor(54, 53, 55); // when you hold it / active
// style.Colors[ImGuiCol_ResizeGripHovered] = ImColor(51, 49, 50, 255); // when you hover over it
// style.Colors[ImGuiCol_SliderGrab] = ImColor(252, 186, 3);
// style.Colors[ImGuiCol_SliderGrabActive] = ImColor(24, 25, 24, 255);
// // 54 , 56, 54 , 255
// style.Colors[ImGuiCol_Border] = ImColor(54, 54, 54);
// style.Colors[ImGuiCol_Separator] = ImColor(54, 54, 54);
// style.Colors[ImGuiCol_SeparatorActive] = ImColor(54, 54, 54);
// style.Colors[ImGuiCol_SeparatorHovered] = ImColor(54, 54, 54);
// static const char* MouseKeys[]{ "Right Mouse","Left Mouse","Control","Shift","Alt","Tab","Mouse3","Mouse4","Z Key","X Key","C Key","V Key" };
// static int KeySelected = 0;
// if (ImGui::Begin((" Bytes.AC"), 0, ImGuiWindowFlags_::ImGuiWindowFlags_NoCollapse | ImGuiWindowFlags_::ImGuiWindowFlags_NoResize | ImGuiWindowFlags_NoScrollbar ))
// ImGui::SetNextWindowSize(ImVec2({ 300, 675 }), ImGuiSetCond_FirstUseEver); {
// ImGui::SetWindowSize(ImVec2(315, 600));
// ImGui::Checkbox(("Enable Aimbot"), &item.Aimbot);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Memory Aimbot - Lock on"));
// ImGui::EndTooltip();
// }
// ImGui::Spacing();
// ImGui::Text((E("Aim Key: ")));
// HotkeyButton(hotkeys::aimkey, ChangeKey, keystatus);
// ImGui::Text("");
// ImGui::Checkbox("Enable Box", &item.Esp_box);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Simple 2D Box, can choose types"));
// ImGui::EndTooltip();
// }
// ImGui::Checkbox(("Enable Muzzle"), &item.Locl_line);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("It will make a line to the player"));
// ImGui::EndTooltip();
// }
// ImGui::Checkbox(("Enable Skeletons"), &item.skeleton);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Simple Skeleton ESP"));
// ImGui::EndTooltip();
// }
// ImGui::Checkbox(("Enable Lines"), &item.Esp_line);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Lines to the enemy's"));
// ImGui::EndTooltip();
// }
// ImGui::Checkbox(("Enable Rainbow "), &item.rainbow_colors);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Rainbow ESP!"));
// ImGui::EndTooltip();
// }
// if (item.Esp_box) {
// ImGui::Combo(("Box Type"), &esp_mode, esp_modes, sizeof(esp_modes) / sizeof(*esp_modes));
// }
// ImGui::Text("");
// ImGui::SliderFloat(("FOV"), &item.AimFOV, 20, 500);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Field of view for aimbot"));
// ImGui::EndTooltip();
// }
// ImGui::SliderFloat(("Smooth X"), &item.Aim_SpeedX, 1, 30);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("X Value of smoothing"));
// ImGui::EndTooltip();
// }
// ImGui::SliderFloat(("Smooth Y"), &item.Aim_SpeedY, 1, 30);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Y Value of smoothing"));
// ImGui::EndTooltip();
// }
// ImGui::Spacing();
// ImGui::Checkbox(("Enable PlayerFly"), &item.playerfly);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Spam jump to fly!"));
// ImGui::EndTooltip();
// }
// ImGui::Checkbox(("Enable Insta-Res"), &item.instant_res);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Hold E!"));
// ImGui::EndTooltip();
// }
// ImGui::Checkbox(("Enable Ads-Air"), &item.aiminair);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Aim while jumping!"));
// ImGui::EndTooltip();
// }
// ImGui::Checkbox(("Enable Rapid-Fire"), &item.Rapidfire);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("Shoots fast!"));
// ImGui::EndTooltip();
// }
// ImGui::SliderFloat(("Rapid Fire Value"), &item.RapidFireValue, 1, 360);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("How fast you shoot!"));
// ImGui::EndTooltip();
// }
// ImGui::SliderFloat("Res Speed", &item.InstantReviveValue, 0, 10);
// ImGui::SameLine();
// ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
// if (ImGui::IsItemHovered()) {
// ImGui::BeginTooltip();
// ImGui::Text(("How fast you res!"));
// ImGui::EndTooltip();
// }
// }
// ImGui::End();
// ImGui::EndFrame();
// ImGui::GetIO().MouseDrawCursor = 1;
//}
<|start_filename|>configuration/defs.h<|end_filename|>
#pragma warning(push, 0)
#ifndef HEXRAYS_DEFS_H
#define HEXRAYS_DEFS_H
#if defined(__GNUC__)
typedef long long ll;
typedef unsigned long long ull;
#define __int64 long long
#define __int32 int
#define __int16 short
#define __int8 char
#define MAKELL(num) num ## LL
#define FMT_64 "ll"
#elif defined(_MSC_VER)
typedef __int64 ll;
typedef unsigned __int64 ull;
#define MAKELL(num) num ## i64
#define FMT_64 "I64"
#elif defined (__BORLANDC__)
typedef __int64 ll;
typedef unsigned __int64 ull;
#define MAKELL(num) num ## i64
#define FMT_64 "L"
#else
#error "unknown compiler"
#endif
typedef unsigned int uint;
typedef unsigned char uchar;
typedef unsigned short ushort;
typedef unsigned long ulong;
typedef char int8;
typedef signed char sint8;
typedef unsigned char uint8;
typedef short int16;
typedef signed short sint16;
typedef unsigned short uint16;
typedef int int32;
typedef signed int sint32;
typedef unsigned int uint32;
typedef ll int64;
typedef ll sint64;
typedef ull uint64;
// Partially defined types. They are used when the decompiler does not know
// anything about the type except its size.
#define _BYTE uint8
#define _WORD uint16
#define _DWORD uint32
#define _QWORD uint64
#if !defined(_MSC_VER)
#define _LONGLONG __int128
#endif
// Non-standard boolean types. They are used when the decompiler can not use
// the standard "bool" type because of the size mismatch but the possible
// values are only 0 and 1. See also 'BOOL' type below.
typedef int8 _BOOL1;
typedef int16 _BOOL2;
typedef int32 _BOOL4;
//#ifndef _WINDOWS_
//typedef int8 BYTE;
//typedef int16 WORD;
//typedef int32 DWORD;
//typedef int32 LONG;
//typedef int BOOL; // uppercase BOOL is usually 4 bytes
//#endif
//typedef int64 QWORD;
#ifndef __cplusplus
typedef int bool; // we want to use bool in our C programs
#endif
#define __pure // pure function: always returns the same value, has no
// side effects
// Non-returning function
#if defined(__GNUC__)
#define __noreturn __attribute__((noreturn))
#else
#define __noreturn __declspec(noreturn)
#endif
#ifndef NULL
#define NULL 0
#endif
// Some convenience macros to make partial accesses nicer
#define LAST_IND(x,part_type) (sizeof(x)/sizeof(part_type) - 1)
#if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN
# define LOW_IND(x,part_type) LAST_IND(x,part_type)
# define HIGH_IND(x,part_type) 0
#else
# define HIGH_IND(x,part_type) LAST_IND(x,part_type)
# define LOW_IND(x,part_type) 0
#endif
// first unsigned macros:
//#define LODWORD(x) (*((DWORD*)&(x))) // low dword
//#define HIDWORD(x) (*((DWORD*)&(x)+1))
#define BYTEn(x, n) (*((_BYTE*)&(x)+n))
#define WORDn(x, n) (*((_WORD*)&(x)+n))
#define DWORDn(x, n) (*((_DWORD*)&(x)+n))
#define LOBYTE(x) BYTEn(x,LOW_IND(x,_BYTE))
#define LOWORD(x) WORDn(x,LOW_IND(x,_WORD))
#define LODWORD(x) DWORDn(x,LOW_IND(x,_DWORD))
#define HIBYTE(x) BYTEn(x,HIGH_IND(x,_BYTE))
#define HIWORD(x) WORDn(x,HIGH_IND(x,_WORD))
#define HIDWORD(x) DWORDn(x,HIGH_IND(x,_DWORD))
#define BYTE1(x) BYTEn(x, 1) // byte 1 (counting from 0)
#define BYTE2(x) BYTEn(x, 2)
#define BYTE3(x) BYTEn(x, 3)
#define BYTE4(x) BYTEn(x, 4)
#define BYTE5(x) BYTEn(x, 5)
#define BYTE6(x) BYTEn(x, 6)
#define BYTE7(x) BYTEn(x, 7)
#define BYTE8(x) BYTEn(x, 8)
#define BYTE9(x) BYTEn(x, 9)
#define BYTE10(x) BYTEn(x, 10)
#define BYTE11(x) BYTEn(x, 11)
#define BYTE12(x) BYTEn(x, 12)
#define BYTE13(x) BYTEn(x, 13)
#define BYTE14(x) BYTEn(x, 14)
#define BYTE15(x) BYTEn(x, 15)
#define WORD1(x) WORDn(x, 1)
#define WORD2(x) WORDn(x, 2) // third word of the object, unsigned
#define WORD3(x) WORDn(x, 3)
#define WORD4(x) WORDn(x, 4)
#define WORD5(x) WORDn(x, 5)
#define WORD6(x) WORDn(x, 6)
#define WORD7(x) WORDn(x, 7)
// now signed macros (the same but with sign extension)
#define SBYTEn(x, n) (*((int8*)&(x)+n))
#define SWORDn(x, n) (*((int16*)&(x)+n))
#define SDWORDn(x, n) (*((int32*)&(x)+n))
#define SLOBYTE(x) SBYTEn(x,LOW_IND(x,int8))
#define SLOWORD(x) SWORDn(x,LOW_IND(x,int16))
#define SLODWORD(x) SDWORDn(x,LOW_IND(x,int32))
#define SHIBYTE(x) SBYTEn(x,HIGH_IND(x,int8))
#define SHIWORD(x) SWORDn(x,HIGH_IND(x,int16))
#define SHIDWORD(x) SDWORDn(x,HIGH_IND(x,int32))
#define SBYTE1(x) SBYTEn(x, 1)
#define SBYTE2(x) SBYTEn(x, 2)
#define SBYTE3(x) SBYTEn(x, 3)
#define SBYTE4(x) SBYTEn(x, 4)
#define SBYTE5(x) SBYTEn(x, 5)
#define SBYTE6(x) SBYTEn(x, 6)
#define SBYTE7(x) SBYTEn(x, 7)
#define SBYTE8(x) SBYTEn(x, 8)
#define SBYTE9(x) SBYTEn(x, 9)
#define SBYTE10(x) SBYTEn(x, 10)
#define SBYTE11(x) SBYTEn(x, 11)
#define SBYTE12(x) SBYTEn(x, 12)
#define SBYTE13(x) SBYTEn(x, 13)
#define SBYTE14(x) SBYTEn(x, 14)
#define SBYTE15(x) SBYTEn(x, 15)
#define SWORD1(x) SWORDn(x, 1)
#define SWORD2(x) SWORDn(x, 2)
#define SWORD3(x) SWORDn(x, 3)
#define SWORD4(x) SWORDn(x, 4)
#define SWORD5(x) SWORDn(x, 5)
#define SWORD6(x) SWORDn(x, 6)
#define SWORD7(x) SWORDn(x, 7)
// Helper functions to represent some assembly instructions.
#ifdef __cplusplus
// compile time assertion
#define __CASSERT_N0__(l) COMPILE_TIME_ASSERT_ ## l
#define __CASSERT_N1__(l) __CASSERT_N0__(l)
#define CASSERT(cnd) typedef char __CASSERT_N1__(__LINE__) [(cnd) ? 1 : -1]
// check that unsigned multiplication does not overflow
template<class T> bool is_mul_ok(T count, T elsize)
{
CASSERT((T)(-1) > 0); // make sure T is unsigned
if (elsize == 0 || count == 0)
return true;
return count <= ((T)(-1)) / elsize;
}
// multiplication that saturates (yields the biggest value) instead of overflowing
// such a construct is useful in "operator new[]"
template<class T> bool saturated_mul(T count, T elsize)
{
return is_mul_ok(count, elsize) ? count * elsize : T(-1);
}
#include <stddef.h> // for size_t
// memcpy() with determined behavior: it always copies
// from the start to the end of the buffer
// note: it copies byte by byte, so it is not equivalent to, for example, rep movsd
inline void* qmemcpy(void* dst, const void* src, size_t cnt)
{
char* out = (char*)dst;
const char* in = (const char*)src;
while (cnt > 0)
{
*out++ = *in++;
--cnt;
}
return dst;
}
// Generate a reference to pair of operands
template<class T> int16 __PAIR__(int8 high, T low) { return (((int16)high) << sizeof(high) * 8) | uint8(low); }
template<class T> int32 __PAIR__(int16 high, T low) { return (((int32)high) << sizeof(high) * 8) | uint16(low); }
template<class T> int64 __PAIR__(int32 high, T low) { return (((int64)high) << sizeof(high) * 8) | uint32(low); }
template<class T> uint16 __PAIR__(uint8 high, T low) { return (((uint16)high) << sizeof(high) * 8) | uint8(low); }
template<class T> uint32 __PAIR__(uint16 high, T low) { return (((uint32)high) << sizeof(high) * 8) | uint16(low); }
template<class T> uint64 __PAIR__(uint32 high, T low) { return (((uint64)high) << sizeof(high) * 8) | uint32(low); }
// rotate left
template<class T> T __ROL__(T value, int count)
{
const uint nbits = sizeof(T) * 8;
if (count > 0)
{
count %= nbits;
T high = value >> (nbits - count);
if (T(-1) < 0)
high &= ~((T(-1) << count));
value <<= count;
value |= high;
}
else
{
count = -count % nbits;
T low = value << (nbits - count);
value >>= count;
value |= low;
}
return value;
}
inline uint8 __ROL1__(uint8 value, int count) { return __ROL__((uint8)value, count); }
inline uint16 __ROL2__(uint16 value, int count) { return __ROL__((uint16)value, count); }
inline uint32 __ROL4__(uint32 value, int count) { return __ROL__((uint32)value, count); }
inline uint64 __ROL8__(uint64 value, int count) { return __ROL__((uint64)value, count); }
inline uint8 __ROR1__(uint8 value, int count) { return __ROL__((uint8)value, -count); }
inline uint16 __ROR2__(uint16 value, int count) { return __ROL__((uint16)value, -count); }
inline uint32 __ROR4__(uint32 value, int count) { return __ROL__((uint32)value, -count); }
inline uint64 __ROR8__(uint64 value, int count) { return __ROL__((uint64)value, -count); }
// carry flag of left shift
template<class T> int8 __MKCSHL__(T value, uint count)
{
const uint nbits = sizeof(T) * 8;
count %= nbits;
return (value >> (nbits - count)) & 1;
}
// carry flag of right shift
template<class T> int8 __MKCSHR__(T value, uint count)
{
return (value >> (count - 1)) & 1;
}
// sign flag
template<class T> int8 __SETS__(T x)
{
if (sizeof(T) == 1)
return int8(x) < 0;
if (sizeof(T) == 2)
return int16(x) < 0;
if (sizeof(T) == 4)
return int32(x) < 0;
return int64(x) < 0;
}
// overflow flag of subtraction (x-y)
template<class T, class U> int8 __OFSUB__(T x, U y)
{
if (sizeof(T) < sizeof(U))
{
U x2 = x;
int8 sx = __SETS__(x2);
return (sx ^ __SETS__(y)) & (sx ^ __SETS__(x2 - y));
}
else
{
T y2 = y;
int8 sx = __SETS__(x);
return (sx ^ __SETS__(y2)) & (sx ^ __SETS__(x - y2));
}
}
// overflow flag of addition (x+y)
template<class T, class U> int8 __OFADD__(T x, U y)
{
if (sizeof(T) < sizeof(U))
{
U x2 = x;
int8 sx = __SETS__(x2);
return ((1 ^ sx) ^ __SETS__(y)) & (sx ^ __SETS__(x2 + y));
}
else
{
T y2 = y;
int8 sx = __SETS__(x);
return ((1 ^ sx) ^ __SETS__(y2)) & (sx ^ __SETS__(x + y2));
}
}
// carry flag of subtraction (x-y)
template<class T, class U> int8 __CFSUB__(T x, U y)
{
int size = sizeof(T) > sizeof(U) ? sizeof(T) : sizeof(U);
if (size == 1)
return uint8(x) < uint8(y);
if (size == 2)
return uint16(x) < uint16(y);
if (size == 4)
return uint32(x) < uint32(y);
return uint64(x) < uint64(y);
}
// carry flag of addition (x+y)
template<class T, class U> int8 __CFADD__(T x, U y)
{
int size = sizeof(T) > sizeof(U) ? sizeof(T) : sizeof(U);
if (size == 1)
return uint8(x) > uint8(x + y);
if (size == 2)
return uint16(x) > uint16(x + y);
if (size == 4)
return uint32(x) > uint32(x + y);
return uint64(x) > uint64(x + y);
}
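// Example (illustrative): __CFADD__((uint8)0xFF, (uint8)0x01) == 1 because the
// 8-bit sum wraps to 0x00, and __CFSUB__((uint8)0x00, (uint8)0x01) == 1 because
// the subtraction borrows.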
#else
// The following definition is not quite correct because it always returns
// uint64. The above C++ functions are good, though.
#define __PAIR__(high, low) (((uint64)(high)<<sizeof(high)*8) | low)
// For C, we just provide macros; they are not quite correct.
#define __ROL__(x, y) __rotl__(x, y) // Rotate left
#define __ROR__(x, y) __rotr__(x, y) // Rotate right
#define __CFSHL__(x, y) invalid_operation // Generate carry flag for (x<<y)
#define __CFSHR__(x, y) invalid_operation // Generate carry flag for (x>>y)
#define __CFADD__(x, y) invalid_operation // Generate carry flag for (x+y)
#define __CFSUB__(x, y) invalid_operation // Generate carry flag for (x-y)
#define __OFADD__(x, y) invalid_operation // Generate overflow flag for (x+y)
#define __OFSUB__(x, y) invalid_operation // Generate overflow flag for (x-y)
#endif
// No definition for rcl/rcr because the carry flag is unknown
#define __RCL__(x, y) invalid_operation // Rotate left thru carry
#define __RCR__(x, y) invalid_operation // Rotate right thru carry
#define __MKCRCL__(x, y) invalid_operation // Generate carry flag for a RCL
#define __MKCRCR__(x, y) invalid_operation // Generate carry flag for a RCR
#define __SETP__(x, y) invalid_operation // Generate parity flag for (x-y)
// In the decompilation listing there are some objects declared as _UNKNOWN
// because we could not determine their types. Since the C compiler does not
// accept void item declarations, we replace them by anything of our choice,
// for example a char:
#define _UNKNOWN char
//#ifdef _MSC_VER
//#define snprintf _snprintf
//#define vsnprintf _vsnprintf
//#endif
#endif // HEXRAYS_DEFS_H
#pragma warning(pop)
<|start_filename|>Menu Look/main.cpp<|end_filename|>
#include <iostream>
#include <Windows.h>
#include "../Win32/win_utils.h"
#include "../Protection/xor.hpp"
#include <dwmapi.h>
#include "Main.h"
#include <vector>
#include "../Driver/driver.h"
#include "../Misc/stdafx.h"
#include "../Defines/define.h"
#include <fstream>
#include <Lmcons.h>
#include <string>
#include <stdio.h>
#include <chrono>
#include <ctime>
#include "../Imgui/imgui_internal.h"
#include <cstdlib>
#include <random>
#include <tchar.h>
#include "main header.h"
namespace offests {
static ULONG64 offset_uworld;
static ULONG64 offest_gname;
static ULONG64 uworld;
static ULONG64 persistent_level;
static ULONG64 game_instance;
static ULONG64 local_players_array;
static ULONG64 player_controller;
static ULONG64 camera_manager;
static ULONG64 rootcomponent;
static ULONG64 Pawn;
static Vector3 relativelocation;
static ULONG64 actor_array;
static ULONG32 actor_count;
}
FTransform GetBoneIndex(DWORD_PTR mesh, int index) {
DWORD_PTR bonearray = read<DWORD_PTR>(mesh + 0x4B0);
if (bonearray == NULL) {
bonearray = read<DWORD_PTR>(mesh + 0x4B0 + 0x10);
}
return read<FTransform>(bonearray + (index * 0x30));
}
Vector3 GetBoneWithRotation(DWORD_PTR mesh, int id) {
FTransform bone = GetBoneIndex(mesh, id);
FTransform ComponentToWorld = read<FTransform>(mesh + 0x1C0);
D3DMATRIX Matrix;
Matrix = MatrixMultiplication(bone.ToMatrixWithScale(), ComponentToWorld.ToMatrixWithScale());
return Vector3(Matrix._41, Matrix._42, Matrix._43);
}
D3DMATRIX Matrix(Vector3 rot, Vector3 origin = Vector3(0, 0, 0)) {
float radPitch = (rot.x * float(M_PI) / 180.f);
float radYaw = (rot.y * float(M_PI) / 180.f);
float radRoll = (rot.z * float(M_PI) / 180.f);
float SP = sinf(radPitch);
float CP = cosf(radPitch);
float SY = sinf(radYaw);
float CY = cosf(radYaw);
float SR = sinf(radRoll);
float CR = cosf(radRoll);
D3DMATRIX matrix;
matrix.m[0][0] = CP * CY;
matrix.m[0][1] = CP * SY;
matrix.m[0][2] = SP;
matrix.m[0][3] = 0.f;
matrix.m[1][0] = SR * SP * CY - CR * SY;
matrix.m[1][1] = SR * SP * SY + CR * CY;
matrix.m[1][2] = -SR * CP;
matrix.m[1][3] = 0.f;
matrix.m[2][0] = -(CR * SP * CY + SR * SY);
matrix.m[2][1] = CY * SR - CR * SP * SY;
matrix.m[2][2] = CR * CP;
matrix.m[2][3] = 0.f;
matrix.m[3][0] = origin.x;
matrix.m[3][1] = origin.y;
matrix.m[3][2] = origin.z;
matrix.m[3][3] = 1.f;
return matrix;
}
extern Vector3 CameraEXT(0, 0, 0);
float FovAngle;
Vector3 ProjectWorldToScreen(Vector3 WorldLocation) {
Vector3 Screenlocation = Vector3(0, 0, 0);
Vector3 Camera;
auto chain69 = read<uintptr_t>(Localplayer + 0xa8);
uint64_t chain699 = read<uintptr_t>(chain69 + 8);
Camera.x = read<float>(chain699 + 0x7E8);
Camera.y = read<float>(Rootcomp + 0x12C);
float test = asin(Camera.x);
float degrees = test * (180.0 / M_PI);
Camera.x = degrees;
if (Camera.y < 0)
Camera.y = 360 + Camera.y;
D3DMATRIX tempMatrix = Matrix(Camera);
Vector3 vAxisX, vAxisY, vAxisZ;
vAxisX = Vector3(tempMatrix.m[0][0], tempMatrix.m[0][1], tempMatrix.m[0][2]);
vAxisY = Vector3(tempMatrix.m[1][0], tempMatrix.m[1][1], tempMatrix.m[1][2]);
vAxisZ = Vector3(tempMatrix.m[2][0], tempMatrix.m[2][1], tempMatrix.m[2][2]);
uint64_t chain = read<uint64_t>(Localplayer + 0x70);
uint64_t chain1 = read<uint64_t>(chain + 0x98);
uint64_t chain2 = read<uint64_t>(chain1 + 0x140);
Vector3 vDelta = WorldLocation - read<Vector3>(chain2 + 0x10);
Vector3 vTransformed = Vector3(vDelta.Dot(vAxisY), vDelta.Dot(vAxisZ), vDelta.Dot(vAxisX));
if (vTransformed.z < 1.f)
vTransformed.z = 1.f;
float zoom = read<float>(chain699 + 0x580);
float FovAngle = 80.0f / (zoom / 1.19f);
float ScreenCenterX = Width / 2;
float ScreenCenterY = Height / 2;
float ScreenCenterZ = Height / 2;
Screenlocation.x = ScreenCenterX + vTransformed.x * (ScreenCenterX / tanf(FovAngle * (float)M_PI / 360.f)) / vTransformed.z;
Screenlocation.y = ScreenCenterY - vTransformed.y * (ScreenCenterX / tanf(FovAngle * (float)M_PI / 360.f)) / vTransformed.z;
Screenlocation.z = ScreenCenterZ - vTransformed.z * (ScreenCenterX / tanf(FovAngle * (float)M_PI / 360.f)) / vTransformed.z;
return Screenlocation;
}
HRESULT DirectXInit(HWND hWnd)
{
if (FAILED(Direct3DCreate9Ex(D3D_SDK_VERSION, &p_Object)))
exit(3);
ZeroMemory(&p_Params, sizeof(p_Params));
p_Params.Windowed = TRUE;
p_Params.SwapEffect = D3DSWAPEFFECT_DISCARD;
p_Params.hDeviceWindow = hWnd;
p_Params.MultiSampleQuality = D3DMULTISAMPLE_NONE;
p_Params.BackBufferFormat = D3DFMT_A8R8G8B8;
p_Params.BackBufferWidth = Width;
p_Params.BackBufferHeight = Height;
p_Params.EnableAutoDepthStencil = TRUE;
p_Params.AutoDepthStencilFormat = D3DFMT_D16;
p_Params.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
if (FAILED(p_Object->CreateDeviceEx(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd, D3DCREATE_HARDWARE_VERTEXPROCESSING, &p_Params, 0, &p_Device)))
{
p_Object->Release();
exit(4);
}
IMGUI_CHECKVERSION();
ImGui::CreateContext();
ImGui::GetIO().Fonts->AddFontDefault();
ImGuiIO& io = ImGui::GetIO(); (void)io;
io.Fonts->AddFontFromFileTTF(E("C:\\Windows\\Fonts\\impact.ttf"), 13.f);
static const ImWchar ranges[] =
{
0x0020, 0x00FF,
0x0400, 0x044F,
0,
};
ImGui_ImplWin32_Init(hWnd);
ImGui_ImplDX9_Init(p_Device);
return S_OK;
}
bool IsVec3Valid(Vector3 vec3)
{
return !(vec3.x == 0 && vec3.y == 0 && vec3.z == 0);
}
void SetupWindow()
{
CreateThread(0, 0, (LPTHREAD_START_ROUTINE)SetWindowToTarget, 0, 0, 0);
WNDCLASSEXA wcex = {
sizeof(WNDCLASSEXA),
0,
DefWindowProcA,
0,
0,
nullptr,
LoadIcon(nullptr, IDI_APPLICATION),
LoadCursor(nullptr, IDC_ARROW),
nullptr,
nullptr,
("Discord"),
LoadIcon(nullptr, IDI_APPLICATION)
};
RECT Rect;
GetWindowRect(GetDesktopWindow(), &Rect);
RegisterClassExA(&wcex);
MyWnd = CreateWindowExA(NULL, E("Discord"), E("Discord"), WS_POPUP, Rect.left, Rect.top, Rect.right, Rect.bottom, NULL, NULL, wcex.hInstance, NULL);
SetWindowLong(MyWnd, GWL_EXSTYLE, WS_EX_LAYERED | WS_EX_TRANSPARENT | WS_EX_TOOLWINDOW);
SetLayeredWindowAttributes(MyWnd, RGB(0, 0, 0), 255, LWA_ALPHA);
//SetWindowDisplayAffinity(MyWnd, 1);
MARGINS margin = { -1 };
DwmExtendFrameIntoClientArea(MyWnd, &margin);
ShowWindow(MyWnd, SW_SHOW);
UpdateWindow(MyWnd);
}
Vector3 AimbotCorrection(float bulletVelocity, float bulletGravity, float targetDistance, Vector3 targetPosition, Vector3 targetVelocity) {
Vector3 recalculated = targetPosition;
float gravity = fabs(bulletGravity);
float time = targetDistance / fabs(bulletVelocity);
float bulletDrop = (gravity / 250) * time * time;
recalculated.z += bulletDrop * 120;
recalculated.x += time * (targetVelocity.x);
recalculated.y += time * (targetVelocity.y);
recalculated.z += time * (targetVelocity.z);
return recalculated;
}
void SetMouseAbsPosition(DWORD x, DWORD y)
{
INPUT input = { 0 };
input.type = INPUT_MOUSE;
input.mi.dwFlags = MOUSEEVENTF_MOVE;
input.mi.dx = x;
input.mi.dy = y;
SendInput(1, &input, sizeof(input));
}
static auto move_to(float x, float y) -> void {
float center_x = (ImGui::GetIO().DisplaySize.x / 2);
float center_y = (ImGui::GetIO().DisplaySize.y / 2);
int AimSpeedX = item.Aim_SpeedX; //item.Aim_Speed
int AimSpeedY = item.Aim_SpeedY;
float target_x = 0;
float target_y = 0;
if (x != 0.f)
{
if (x > center_x)
{
target_x = -(center_x - x);
target_x /= item.Aim_SpeedX;
if (target_x + center_x > center_x * 2.f) target_x = 0.f;
}
if (x < center_x)
{
target_x = x - center_x;
target_x /= item.Aim_SpeedX;
if (target_x + center_x < 0.f) target_x = 0.f;
}
}
if (y != 0.f)
{
if (y > center_y)
{
target_y = -(center_y - y);
target_y /= item.Aim_SpeedY;
if (target_y + center_y > center_y * 2.f) target_y = 0.f;
}
if (y < center_y)
{
target_y = y - center_y;
target_y /= item.Aim_SpeedY;
if (target_y + center_y < 0.f) target_y = 0.f;
}
}
SetMouseAbsPosition(static_cast<DWORD>(target_x), static_cast<DWORD>(target_y));
}
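// Note: GetCrossDistance() below returns the 2D (x, y) screen-space distance;
// the z parameters are accepted but unused.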
double GetCrossDistance(double x1, double y1, double z1, double x2, double y2, double z2) {
return sqrt(pow((x2 - x1), 2) + pow((y2 - y1), 2));
}
#define PI 3.14159265358979323846f
typedef struct
{
DWORD R;
DWORD G;
DWORD B;
DWORD A;
}RGBA;
class Color
{
public:
RGBA red = { 255,0,0,255 };
RGBA Magenta = { 255,0,255,255 };
RGBA yellow = { 255,255,0,255 };
RGBA grayblue = { 128,128,255,255 };
RGBA green = { 128,224,0,255 };
RGBA darkgreen = { 0,224,128,255 };
RGBA brown = { 192,96,0,255 };
RGBA pink = { 255,168,255,255 };
RGBA DarkYellow = { 216,216,0,255 };
RGBA SilverWhite = { 236,236,236,255 };
RGBA purple = { 144,0,255,255 };
RGBA Navy = { 88,48,224,255 };
RGBA skyblue = { 0,136,255,255 };
RGBA graygreen = { 128,160,128,255 };
RGBA blue = { 0,96,192,255 };
RGBA orange = { 255,128,0,255 };
RGBA peachred = { 255,80,128,255 };
RGBA reds = { 255,128,192,255 };
RGBA darkgray = { 96,96,96,255 };
RGBA Navys = { 0,0,128,255 };
RGBA darkgreens = { 0,128,0,255 };
RGBA darkblue = { 0,128,128,255 };
RGBA redbrown = { 128,0,0,255 };
RGBA purplered = { 128,0,128,255 };
RGBA greens = { 0,255,0,255 };
RGBA envy = { 0,255,255,255 };
RGBA black = { 0,0,0,255 };
RGBA gray = { 128,128,128,255 };
RGBA white = { 255,255,255,255 };
RGBA blues = { 30,144,255,255 };
RGBA lightblue = { 135,206,250,160 };
RGBA Scarlet = { 220, 20, 60, 160 };
RGBA white_ = { 255,255,255,200 };
RGBA gray_ = { 128,128,128,200 };
RGBA black_ = { 0,0,0,200 };
RGBA red_ = { 255,0,0,200 };
RGBA Magenta_ = { 255,0,255,200 };
RGBA yellow_ = { 255,255,0,200 };
RGBA grayblue_ = { 128,128,255,200 };
RGBA green_ = { 128,224,0,200 };
RGBA darkgreen_ = { 0,224,128,200 };
RGBA brown_ = { 192,96,0,200 };
RGBA pink_ = { 255,168,255,200 };
RGBA darkyellow_ = { 216,216,0,200 };
RGBA silverwhite_ = { 236,236,236,200 };
RGBA purple_ = { 144,0,255,200 };
RGBA Blue_ = { 88,48,224,200 };
RGBA skyblue_ = { 0,136,255,200 };
RGBA graygreen_ = { 128,160,128,200 };
RGBA blue_ = { 0,96,192,200 };
RGBA orange_ = { 255,128,0,200 };
RGBA pinks_ = { 255,80,128,200 };
RGBA Fuhong_ = { 255,128,192,200 };
RGBA darkgray_ = { 96,96,96,200 };
RGBA Navy_ = { 0,0,128,200 };
RGBA darkgreens_ = { 0,128,0,200 };
RGBA darkblue_ = { 0,128,128,200 };
RGBA redbrown_ = { 128,0,0,200 };
RGBA purplered_ = { 128,0,128,200 };
RGBA greens_ = { 0,255,0,200 };
RGBA envy_ = { 0,255,255,200 };
RGBA glassblack = { 0, 0, 0, 160 };
RGBA GlassBlue = { 65,105,225,80 };
RGBA glassyellow = { 255,255,0,160 };
RGBA glass = { 200,200,200,60 };
RGBA filled = { 0, 0, 0, 150 };
RGBA Plum = { 221,160,221,160 };
RGBA rainbow() {
static float x = 0, y = 0;
static float r = 0, g = 0, b = 0;
if (y >= 0.0f && y < 255.0f) {
r = 255.0f;
g = 0.0f;
b = x;
}
else if (y >= 255.0f && y < 510.0f) {
r = 255.0f - x;
g = 0.0f;
b = 255.0f;
}
else if (y >= 510.0f && y < 765.0f) {
r = 0.0f;
g = x;
b = 255.0f;
}
else if (y >= 765.0f && y < 1020.0f) {
r = 0.0f;
g = 255.0f;
b = 255.0f - x;
}
else if (y >= 1020.0f && y < 1275.0f) {
r = x;
g = 255.0f;
b = 0.0f;
}
else if (y >= 1275.0f && y < 1530.0f) {
r = 255.0f;
g = 255.0f - x;
b = 0.0f;
}
x += item.rainbow_speed; //increase this value to switch colors faster
if (x >= 255.0f)
x = 0.0f;
y += item.rainbow_speed; //increase this value to switch colors faster
if (y > 1530.0f)
y = 0.0f;
return RGBA{ (DWORD)r, (DWORD)g, (DWORD)b, 255 };
}
};
Color Col;
std::string string_To_UTF8(const std::string& str)
{
int nwLen = ::MultiByteToWideChar(CP_ACP, 0, str.c_str(), -1, NULL, 0);
wchar_t* pwBuf = new wchar_t[nwLen + 1];
ZeroMemory(pwBuf, nwLen * 2 + 2);
::MultiByteToWideChar(CP_ACP, 0, str.c_str(), str.length(), pwBuf, nwLen);
int nLen = ::WideCharToMultiByte(CP_UTF8, 0, pwBuf, -1, NULL, NULL, NULL, NULL);
char* pBuf = new char[nLen + 1];
ZeroMemory(pBuf, nLen + 1);
::WideCharToMultiByte(CP_UTF8, 0, pwBuf, nwLen, pBuf, nLen, NULL, NULL);
std::string retStr(pBuf);
delete[]pwBuf;
delete[]pBuf;
pwBuf = NULL;
pBuf = NULL;
return retStr;
}
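// Usage sketch (illustrative): Win32 has no single call that converts an ANSI
// (CP_ACP) string straight to UTF-8, so the function above pivots through UTF-16:
//   std::string utf8 = string_To_UTF8(ansiText); // ansiText assumed CP_ACP-encoded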
std::string WStringToUTF8(const wchar_t* lpwcszWString)
{
char* pElementText;
int iTextLen = ::WideCharToMultiByte(CP_UTF8, 0, (LPWSTR)lpwcszWString, -1, NULL, 0, NULL, NULL);
pElementText = new char[iTextLen + 1];
memset((void*)pElementText, 0, (iTextLen + 1) * sizeof(char));
::WideCharToMultiByte(CP_UTF8, 0, (LPWSTR)lpwcszWString, -1, pElementText, iTextLen, NULL, NULL);
std::string strReturn(pElementText);
delete[] pElementText;
return strReturn;
}
std::wstring MBytesToWString(const char* lpcszString)
{
int len = strlen(lpcszString);
int unicodeLen = ::MultiByteToWideChar(CP_ACP, 0, lpcszString, -1, NULL, 0);
wchar_t* pUnicode = new wchar_t[unicodeLen + 1];
memset(pUnicode, 0, (unicodeLen + 1) * sizeof(wchar_t));
::MultiByteToWideChar(CP_ACP, 0, lpcszString, -1, (LPWSTR)pUnicode, unicodeLen);
std::wstring wString = (wchar_t*)pUnicode;
delete[] pUnicode;
return wString;
}
void DrawStrokeText(int x, int y, RGBA* color, const char* str)
{
ImFont a;
std::string utf_8_1 = std::string(str);
std::string utf_8_2 = string_To_UTF8(utf_8_1);
ImGui::GetOverlayDrawList()->AddText(ImVec2(x, y - 1), ImGui::ColorConvertFloat4ToU32(ImVec4(1 / 255.0, 1 / 255.0, 1 / 255.0, 255 / 255.0)), utf_8_2.c_str());
ImGui::GetOverlayDrawList()->AddText(ImVec2(x, y + 1), ImGui::ColorConvertFloat4ToU32(ImVec4(1 / 255.0, 1 / 255.0, 1 / 255.0, 255 / 255.0)), utf_8_2.c_str());
ImGui::GetOverlayDrawList()->AddText(ImVec2(x - 1, y), ImGui::ColorConvertFloat4ToU32(ImVec4(1 / 255.0, 1 / 255.0, 1 / 255.0, 255 / 255.0)), utf_8_2.c_str());
ImGui::GetOverlayDrawList()->AddText(ImVec2(x + 1, y), ImGui::ColorConvertFloat4ToU32(ImVec4(1 / 255.0, 1 / 255.0, 1 / 255.0, 255 / 255.0)), utf_8_2.c_str());
ImGui::GetOverlayDrawList()->AddText(ImVec2(x, y), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), utf_8_2.c_str());
}
void DrawStrokeText2(int x, int y, RGBA* color, const std::string str)
{
ImFont a;
std::string utf_8_1 = std::string(str);
std::string utf_8_2 = string_To_UTF8(utf_8_1);
ImGui::GetOverlayDrawList()->AddText(ImVec2(x, y - 1), ImGui::ColorConvertFloat4ToU32(ImVec4(1 / 255.0, 1 / 255.0, 1 / 255.0, 255 / 255.0)), utf_8_2.c_str());
ImGui::GetOverlayDrawList()->AddText(ImVec2(x, y + 1), ImGui::ColorConvertFloat4ToU32(ImVec4(1 / 255.0, 1 / 255.0, 1 / 255.0, 255 / 255.0)), utf_8_2.c_str());
ImGui::GetOverlayDrawList()->AddText(ImVec2(x - 1, y), ImGui::ColorConvertFloat4ToU32(ImVec4(1 / 255.0, 1 / 255.0, 1 / 255.0, 255 / 255.0)), utf_8_2.c_str());
ImGui::GetOverlayDrawList()->AddText(ImVec2(x + 1, y), ImGui::ColorConvertFloat4ToU32(ImVec4(1 / 255.0, 1 / 255.0, 1 / 255.0, 255 / 255.0)), utf_8_2.c_str());
ImGui::GetOverlayDrawList()->AddText(ImVec2(x, y), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), utf_8_2.c_str());
}
void DrawNewText(int x, int y, RGBA* color, const char* str)
{
ImFont a;
std::string utf_8_1 = std::string(str);
std::string utf_8_2 = string_To_UTF8(utf_8_1);
ImGui::GetOverlayDrawList()->AddText(ImVec2(x, y), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), utf_8_2.c_str());
}
void DrawRect(int x, int y, int w, int h, RGBA* color, int thickness)
{
ImGui::GetOverlayDrawList()->AddRect(ImVec2(x, y), ImVec2(x + w, y + h), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), 0, 0, thickness);
}
void DrawFilledRect(int x, int y, int w, int h, RGBA* color)
{
ImGui::GetOverlayDrawList()->AddRectFilled(ImVec2(x, y), ImVec2(x + w, y + h), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), 0, 0);
}
void DrawLeftProgressBar(int x, int y, int w, int h, int thick, int m_health)
{
int G = (255 * m_health / 100);
int R = 255 - G;
RGBA healthcol = { R, G, 0, 255 };
DrawFilledRect(x - (w / 2) - 3, y, thick, (h)*m_health / 100, &healthcol);
}
void DrawRightProgressBar(int x, int y, int w, int h, int thick, int m_health)
{
int G = (255 * m_health / 100);
int R = 255 - G;
RGBA healthcol = { R, G, 0, 255 };
DrawFilledRect(x + (w / 2) - 25, y, thick, (h)*m_health / 100, &healthcol);
}
void DrawString(float fontSize, int x, int y, RGBA* color, bool bCenter, bool stroke, const char* pText, ...)
{
va_list va_alist;
char buf[1024] = { 0 };
va_start(va_alist, pText);
_vsnprintf_s(buf, sizeof(buf), pText, va_alist);
va_end(va_alist);
std::string text = WStringToUTF8(MBytesToWString(buf).c_str());
if (bCenter)
{
ImVec2 textSize = ImGui::CalcTextSize(text.c_str());
x = x - textSize.x / 2;
y = y - textSize.y;
}
if (stroke)
{
ImGui::GetOverlayDrawList()->AddText(ImGui::GetFont(), fontSize, ImVec2(x + 1, y + 1), ImGui::ColorConvertFloat4ToU32(ImVec4(0, 0, 0, 1)), text.c_str());
ImGui::GetOverlayDrawList()->AddText(ImGui::GetFont(), fontSize, ImVec2(x - 1, y - 1), ImGui::ColorConvertFloat4ToU32(ImVec4(0, 0, 0, 1)), text.c_str());
ImGui::GetOverlayDrawList()->AddText(ImGui::GetFont(), fontSize, ImVec2(x + 1, y - 1), ImGui::ColorConvertFloat4ToU32(ImVec4(0, 0, 0, 1)), text.c_str());
ImGui::GetOverlayDrawList()->AddText(ImGui::GetFont(), fontSize, ImVec2(x - 1, y + 1), ImGui::ColorConvertFloat4ToU32(ImVec4(0, 0, 0, 1)), text.c_str());
}
ImGui::GetOverlayDrawList()->AddText(ImGui::GetFont(), fontSize, ImVec2(x, y), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), text.c_str());
}
void DrawCircleFilled(int x, int y, int radius, RGBA* color)
{
ImGui::GetOverlayDrawList()->AddCircleFilled(ImVec2(x, y), radius, ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)));
}
void DrawCircle(int x, int y, int radius, RGBA* color, int segments)
{
ImGui::GetOverlayDrawList()->AddCircle(ImVec2(x, y), radius, ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), segments);
}
void DrawTriangle(int x1, int y1, int x2, int y2, int x3, int y3, RGBA* color, float thickne)
{
ImGui::GetOverlayDrawList()->AddTriangle(ImVec2(x1, y1), ImVec2(x2, y2), ImVec2(x3, y3), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), thickne);
}
void DrawTriangleFilled(int x1, int y1, int x2, int y2, int x3, int y3, RGBA* color)
{
ImGui::GetOverlayDrawList()->AddTriangleFilled(ImVec2(x1, y1), ImVec2(x2, y2), ImVec2(x3, y3), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)));
}
void DrawLine(int x1, int y1, int x2, int y2, RGBA* color, int thickness)
{
ImGui::GetOverlayDrawList()->AddLine(ImVec2(x1, y1), ImVec2(x2, y2), ImGui::ColorConvertFloat4ToU32(ImVec4(color->R / 255.0, color->G / 255.0, color->B / 255.0, color->A / 255.0)), thickness);
}
void DrawCornerBox(int x, int y, int w, int h, int borderPx, RGBA* color)
{
DrawFilledRect(x + borderPx, y, w / 3, borderPx, color); //top
DrawFilledRect(x + w - w / 3 + borderPx, y, w / 3, borderPx, color); //top
DrawFilledRect(x, y, borderPx, h / 3, color); //left
DrawFilledRect(x, y + h - h / 3 + borderPx * 2, borderPx, h / 3, color); //left
DrawFilledRect(x + borderPx, y + h + borderPx, w / 3, borderPx, color); //bottom
DrawFilledRect(x + w - w / 3 + borderPx, y + h + borderPx, w / 3, borderPx, color); //bottom
DrawFilledRect(x + w + borderPx, y, borderPx, h / 3, color);//right
DrawFilledRect(x + w + borderPx, y + h - h / 3 + borderPx * 2, borderPx, h / 3, color);//right
}
void DrawNormalBox(int x, int y, int w, int h, int borderPx, RGBA* color)
{
DrawFilledRect(x + borderPx, y, w, borderPx, color); //top
DrawFilledRect(x + w - w + borderPx, y, w, borderPx, color); //top
DrawFilledRect(x, y, borderPx, h, color); //left
DrawFilledRect(x, y + h - h + borderPx * 2, borderPx, h, color); //left
DrawFilledRect(x + borderPx, y + h + borderPx, w, borderPx, color); //bottom
DrawFilledRect(x + w - w + borderPx, y + h + borderPx, w, borderPx, color); //bottom
DrawFilledRect(x + w + borderPx, y, borderPx, h, color);//right
DrawFilledRect(x + w + borderPx, y + h - h + borderPx * 2, borderPx, h, color);//right
}
void DrawLine2(const ImVec2& from, const ImVec2& to, uint32_t color, float thickness)
{
float a = (color >> 24) & 0xff;
float r = (color >> 16) & 0xff;
float g = (color >> 8) & 0xff;
float b = (color) & 0xff;
ImGui::GetOverlayDrawList()->AddLine(from, to, ImGui::GetColorU32(ImVec4(r / 255, g / 255, b / 255, a / 255)), thickness);
}
void DrawRectRainbow(int x, int y, int width, int height, float flSpeed, RGBA* color, float& flRainbow)
{
ImDrawList* windowDrawList = ImGui::GetWindowDrawList();
flRainbow += flSpeed;
if (flRainbow > 1.f) flRainbow = 0.f;
for (int i = 0; i < width; i++)
{
float hue = (1.f / (float)width) * i;
hue -= flRainbow;
if (hue < 0.f) hue += 1.f;
windowDrawList->AddRectFilled(ImVec2(x + i, y), ImVec2(x + i + 1, y + height), ImColor::HSV(hue, 1.f, 1.f)); // draw one 1-px column per hue step so the computed hue is actually used
}
}
typedef struct _FNlEntity {
uint64_t Actor;
int ID;
uint64_t mesh;
}FNlEntity;
std::vector<FNlEntity> entityList;
std::string GetNameFromFName(int key)
{
uint32_t ChunkOffset = (uint32_t)((int)(key) >> 16);
uint16_t NameOffset = (uint16_t)key;
uint64_t NamePoolChunk = read<uint64_t>((uintptr_t)base_address + 0xB6528C0 + ((ChunkOffset + 2) * 8)); // ERROR_NAME_SIZE_EXCEEDED
uint64_t entryOffset = NamePoolChunk + (DWORD)(2 * NameOffset);
uint16_t nameEntry = read<uint16_t>(entryOffset);
int nameLength = nameEntry >> 6;
char buff[1028];
char* v2 = buff; // rdi
unsigned __int16* v3; // rbx
int v4 = nameLength; // ebx
int16 result; // ax
int v6; // edx
int v7; // ecx
int v8; // ecx
__int16 v9; // ax
static DWORD_PTR decryptOffset = NULL;
if (!decryptOffset)
decryptOffset = read<DWORD_PTR>((uintptr_t)base_address + 0xB4F9288);
result = decryptOffset;
if ((uint32_t)nameLength && nameLength > 0)
{
driver->ReadProcessMemory(entryOffset + 2, buff, nameLength);
v6 = 0;
v7 = 38;
do
{
v8 = v6++ | v7;
v9 = v8;
v7 = 2 * v8;
result = ~v9;
*v2 ^= result;
++v2;
} while (v6 < nameLength);
buff[nameLength] = '\0';
return std::string(buff);
}
else
{
return "";
}
}
void cache()
{
while (true) {
std::vector<FNlEntity> tmpList;
Uworld = read<DWORD_PTR>(sdk::module_base + 0xB613240);
DWORD_PTR Gameinstance = read<DWORD_PTR>(Uworld + 0x190);
DWORD_PTR LocalPlayers = read<DWORD_PTR>(Gameinstance + 0x38);
Localplayer = read<DWORD_PTR>(LocalPlayers);
PlayerController = read<DWORD_PTR>(Localplayer + 0x30);
LocalPawn = read<DWORD_PTR>(PlayerController + 0x2B0);
PlayerState = read<DWORD_PTR>(LocalPawn + 0x240);
Rootcomp = read<DWORD_PTR>(LocalPawn + 0x138); //old 130
offests::uworld = read<uint64_t>(sdk::module_base + 0xB613240);
offests::game_instance = read<uint64_t>(offests::uworld + 0x190);
offests::local_players_array = read<uint64_t>(read<uint64_t>(offests::game_instance + 0x38));
offests::player_controller = read<uint64_t>(offests::local_players_array + 0x30);
offests::Pawn = read<uint64_t>(offests::player_controller + 0x2B0);
if (!offests::Pawn)continue;
offests::rootcomponent = read<uint64_t>(offests::Pawn + 0x138);
if (!offests::rootcomponent)continue;
offests::relativelocation = read<Vector3>(offests::rootcomponent + 0x11C);
if (!IsVec3Valid(offests::relativelocation))continue;
relativelocation = read<DWORD_PTR>(Rootcomp + 0x11C);
if (LocalPawn != 0) {
localplayerID = read<int>(LocalPawn + 0x18);
}
Persistentlevel = read<DWORD_PTR>(Uworld + 0x30);
DWORD ActorCount = read<DWORD>(Persistentlevel + 0xA0);
DWORD_PTR AActors = read<DWORD_PTR>(Persistentlevel + 0x98);
for (int i = 0; i < ActorCount; i++) {
uint64_t CurrentActor = read<uint64_t>(AActors + i * 0x8);
int curactorid = read<int>(CurrentActor + 0x18);
if (curactorid == localplayerID || curactorid == localplayerID + 765) {
FNlEntity fnlEntity{ };
fnlEntity.Actor = CurrentActor;
fnlEntity.mesh = read<uint64_t>(CurrentActor + 0x288);
fnlEntity.ID = curactorid;
tmpList.push_back(fnlEntity);
}
}
entityList = tmpList;
Sleep(1);
}
}
void AimAt(DWORD_PTR entity) {
uint64_t currentactormesh = read<uint64_t>(entity + 0x288);
auto rootHead = GetBoneWithRotation(currentactormesh, 98);
if (item.Aim_Prediction) {
float distance = localactorpos.Distance(rootHead) / 250;
uint64_t CurrentActorRootComponent = read<uint64_t>(entity + 0x138);
Vector3 vellocity = read<Vector3>(CurrentActorRootComponent + 0x140);
Vector3 Predicted = AimbotCorrection(30000, -1004, distance, rootHead, vellocity);
Vector3 rootHeadOut = ProjectWorldToScreen(Predicted);
if (rootHeadOut.x != 0 || rootHeadOut.y != 0 || rootHeadOut.z != 0) {
if ((GetCrossDistance(rootHeadOut.x, rootHeadOut.y, rootHeadOut.z, Width / 2, Height / 2, Depth / 2) <= item.AimFOV * 1)) {
move_to(rootHeadOut.x, rootHeadOut.y);
}
}
}
else {
Vector3 rootHeadOut = ProjectWorldToScreen(rootHead);
if (rootHeadOut.x != 0 || rootHeadOut.y != 0 || rootHeadOut.z != 0) {
if ((GetCrossDistance(rootHeadOut.x, rootHeadOut.y, rootHeadOut.z, Width / 2, Height / 2, Depth / 2) <= item.AimFOV * 1)) {
move_to(rootHeadOut.x, rootHeadOut.y);
}
}
}
}
void AimAt2(DWORD_PTR entity) {
uint64_t currentactormesh = read<uint64_t>(entity + 0x288);
auto rootHead = GetBoneWithRotation(currentactormesh, 98);
if (item.Aim_Prediction) {
float distance = localactorpos.Distance(rootHead) / 250;
uint64_t CurrentActorRootComponent = read<uint64_t>(entity + 0x138);
Vector3 vellocity = read<Vector3>(CurrentActorRootComponent + 0x140);
Vector3 Predicted = AimbotCorrection(30000, -1004, distance, rootHead, vellocity);
Vector3 rootHeadOut = ProjectWorldToScreen(Predicted);
if (rootHeadOut.x != 0 || rootHeadOut.y != 0 || rootHeadOut.z != 0) {
if ((GetCrossDistance(rootHeadOut.x, rootHeadOut.y, rootHeadOut.z, Width / 2, Height / 2, Depth / 2) <= item.AimFOV * 1)) {
if (item.Locl_line) {
ImGui::GetOverlayDrawList()->AddLine(ImVec2(Width / 2, Height / 2), ImVec2(rootHeadOut.x, rootHeadOut.y), ImGui::GetColorU32({ item.LockLine[0], item.LockLine[1], item.LockLine[2], 1.0f }), item.Thickness);
}
}
}
}
else {
Vector3 rootHeadOut = ProjectWorldToScreen(rootHead);
if (rootHeadOut.x != 0 || rootHeadOut.y != 0 || rootHeadOut.z != 0) {
if ((GetCrossDistance(rootHeadOut.x, rootHeadOut.y, rootHeadOut.z, Width / 2, Height / 2, Depth / 2) <= item.AimFOV * 1)) {
if (item.Locl_line) {
ImGui::GetOverlayDrawList()->AddLine(ImVec2(Width / 2, Height / 2), ImVec2(rootHeadOut.x, rootHeadOut.y), ImGui::GetColorU32({ item.LockLine[0], item.LockLine[1], item.LockLine[2], 1.0f }), item.Thickness);
}
}
}
}
}
void DrawSkeleton(DWORD_PTR mesh)
{
Vector3 vHeadBone = GetBoneWithRotation(mesh, 98);
Vector3 vHip = GetBoneWithRotation(mesh, 2);
Vector3 vNeck = GetBoneWithRotation(mesh, 66);
Vector3 vUpperArmLeft = GetBoneWithRotation(mesh, 93);
Vector3 vUpperArmRight = GetBoneWithRotation(mesh, 9);
Vector3 vLeftHand = GetBoneWithRotation(mesh, 62);
Vector3 vRightHand = GetBoneWithRotation(mesh, 33);
Vector3 vLeftHand1 = GetBoneWithRotation(mesh, 100);
Vector3 vRightHand1 = GetBoneWithRotation(mesh, 99);
Vector3 vRightThigh = GetBoneWithRotation(mesh, 69);
Vector3 vLeftThigh = GetBoneWithRotation(mesh, 76);
Vector3 vRightCalf = GetBoneWithRotation(mesh, 72);
Vector3 vLeftCalf = GetBoneWithRotation(mesh, 79);
Vector3 vLeftFoot = GetBoneWithRotation(mesh, 85);
Vector3 vRightFoot = GetBoneWithRotation(mesh, 84);
Vector3 vHeadBoneOut = ProjectWorldToScreen(vHeadBone);
Vector3 vHipOut = ProjectWorldToScreen(vHip);
Vector3 vNeckOut = ProjectWorldToScreen(vNeck);
Vector3 vUpperArmLeftOut = ProjectWorldToScreen(vUpperArmLeft);
Vector3 vUpperArmRightOut = ProjectWorldToScreen(vUpperArmRight);
Vector3 vLeftHandOut = ProjectWorldToScreen(vLeftHand);
Vector3 vRightHandOut = ProjectWorldToScreen(vRightHand);
Vector3 vLeftHandOut1 = ProjectWorldToScreen(vLeftHand1);
Vector3 vRightHandOut1 = ProjectWorldToScreen(vRightHand1);
Vector3 vRightThighOut = ProjectWorldToScreen(vRightThigh);
Vector3 vLeftThighOut = ProjectWorldToScreen(vLeftThigh);
Vector3 vRightCalfOut = ProjectWorldToScreen(vRightCalf);
Vector3 vLeftCalfOut = ProjectWorldToScreen(vLeftCalf);
Vector3 vLeftFootOut = ProjectWorldToScreen(vLeftFoot);
Vector3 vRightFootOut = ProjectWorldToScreen(vRightFoot);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vHipOut.x, vHipOut.y), ImVec2(vNeckOut.x, vNeckOut.y), ImColor(255, 0, 255, 255), 2.0f );
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vUpperArmLeftOut.x, vUpperArmLeftOut.y), ImVec2(vNeckOut.x, vNeckOut.y), ImColor(255, 0, 255, 255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vUpperArmRightOut.x, vUpperArmRightOut.y), ImVec2(vNeckOut.x, vNeckOut.y), ImColor(255, 0, 255, 255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vLeftHandOut.x, vLeftHandOut.y), ImVec2(vUpperArmLeftOut.x, vUpperArmLeftOut.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vRightHandOut.x, vRightHandOut.y), ImVec2(vUpperArmRightOut.x, vUpperArmRightOut.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vLeftHandOut.x, vLeftHandOut.y), ImVec2(vLeftHandOut1.x, vLeftHandOut1.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vRightHandOut.x, vRightHandOut.y), ImVec2(vRightHandOut1.x, vRightHandOut1.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vLeftThighOut.x, vLeftThighOut.y), ImVec2(vHipOut.x, vHipOut.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vRightThighOut.x, vRightThighOut.y), ImVec2(vHipOut.x, vHipOut.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vLeftCalfOut.x, vLeftCalfOut.y), ImVec2(vLeftThighOut.x, vLeftThighOut.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vRightCalfOut.x, vRightCalfOut.y), ImVec2(vRightThighOut.x, vRightThighOut.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vLeftFootOut.x, vLeftFootOut.y), ImVec2(vLeftCalfOut.x, vLeftCalfOut.y), ImColor(255,0,255,255), 2.0f);
ImGui::GetOverlayDrawList()->AddLine(ImVec2(vRightFootOut.x, vRightFootOut.y), ImVec2(vRightCalfOut.x, vRightCalfOut.y), ImColor(255,0,255,255), 2.0f);
}
bool Headd = true;
bool Neck = false;
bool Chest = false;
ImDrawList* Rendererrr = ImGui::GetOverlayDrawList();
bool isVisible(uint64_t mesh)
{
float bing = read<float>(mesh + 0x280);
float bong = read<float>(mesh + 0x284);
const float tick = 0.06f;
return bong + tick >= bing;
}
RGBA ESPColorSkill = { 0, 0, 0, 255 };
void DrawESP() {
auto entityListCopy = entityList;
float closestDistance = FLT_MAX;
DWORD_PTR closestPawn = NULL;
DWORD_PTR AActors = read<DWORD_PTR>(Ulevel + 0x98);
uint64_t persislevel = read<uint64_t>(Uworld + 0x30);
uint64_t actors = read<uint64_t>(persislevel + 0x98);
int ActorTeamId = read<int>(0xF50);
int curactorid = read<int>(0x18);
if (curactorid == localplayerID || curactorid == 20328438 || curactorid == 20328753 || curactorid == 9343426 || curactorid == 9875120 || curactorid == 9877254 || curactorid == 22405639 || curactorid == 9874439 || curactorid == 14169230)
if (AActors == (DWORD_PTR)nullptr)
return;
for (unsigned long i = 0; i < entityListCopy.size(); ++i) {
FNlEntity entity = entityListCopy[i];
uint64_t actor = read<uint64_t>(actors + (i * 0x8));
uint64_t CurrentActor = read<uint64_t>(AActors + i * 0x8);
uint64_t CurActorRootComponent = read<uint64_t>(entity.Actor + 0x138);
if (CurActorRootComponent == (uint64_t)nullptr || CurActorRootComponent == -1 || CurActorRootComponent == NULL)
continue;
Vector3 actorpos = read<Vector3>(CurActorRootComponent + 0x11C);
Vector3 actorposW2s = ProjectWorldToScreen(actorpos);
DWORD64 otherPlayerState = read<uint64_t>(entity.Actor + 0x240);
if (otherPlayerState == (uint64_t)nullptr || otherPlayerState == -1 || otherPlayerState == NULL)
continue;
//printf("test5\n");
localactorpos = read<Vector3>(Rootcomp + 0x11C);
Vector3 bone66 = GetBoneWithRotation(entity.mesh, 98);
Vector3 bone0 = GetBoneWithRotation(entity.mesh, 0);
Vector3 top = ProjectWorldToScreen(bone66);
Vector3 chest = ProjectWorldToScreen(bone66);
Vector3 aimbotspot = ProjectWorldToScreen(bone66);
Vector3 bottom = ProjectWorldToScreen(bone0);
Vector3 Head = ProjectWorldToScreen(Vector3(bone66.x - 10, bone66.y, bone66.z + 15));
Vector3 chestnone = GetBoneWithRotation(entity.mesh, 66);
Vector3 chest1 = ProjectWorldToScreen(chestnone);
Vector3 relativelocation = read<Vector3>(Rootcomp + 0x11C);
Vector3 player_position = GetBoneWithRotation(entity.mesh, 0);
Vector3 player_screen = ProjectWorldToScreen(player_position);
Vector3 BoxHead = GetBoneWithRotation(entity.mesh, 98);
Vector3 head_screen = ProjectWorldToScreen(Vector3(BoxHead.x, BoxHead.y - 0.6, BoxHead.z));
float distance = localactorpos.Distance(bone66) / 100.f;
float BoxHeight = (float)(Head.y - bottom.y);
float BoxWidth = BoxHeight / 1.8f;
float CornerHeight = abs(Head.y - bottom.y);
float CornerWidth = BoxHeight * 0.46;
int MyTeamId = read<int>(PlayerState + 0xF50);
int ActorTeamId = read<int>(otherPlayerState + 0xF50);
int curactorid = read<int>(CurrentActor + 0x98);
uint32_t object_id = read<uint32_t>(CurrentActor + 0x18);
std::string Names2 = GetNameFromFName(object_id);
uintptr_t MyTeamIndex, EnemyTeamIndex;
if (item.VisableCheck) {
if (isVisible(entity.mesh)) {
ESPColorSkill = { 0, 255, 0, 255 };
}
else {
ESPColorSkill = { 255, 0, 0, 255 };
}
}
if (MyTeamId != ActorTeamId) {
//if (Names2.find("PlayerPawn") != std::string::npos) {
if (item.chams)
{
uintptr_t MyState = read<uintptr_t>(LocalPawn + 0x240);
if (!MyState) continue;
MyTeamIndex = read<uintptr_t>(MyState + 0xF50);
if (!MyTeamIndex) continue;
uintptr_t SquadID = read<uintptr_t>(MyState + 0x1124);
if (!SquadID) break;
uintptr_t EnemyState = read<uintptr_t>(entity.Actor + 0x240);
if (!EnemyState) continue;
write<uintptr_t>(EnemyState + 0xF50, MyTeamIndex);
write<uintptr_t>(EnemyState + 0x1124, SquadID);
}
if (item.FOVChanger) {
if (LocalPawn) {
uintptr_t Mesh = read<uintptr_t>(LocalPawn + 0x288);
write<Vector3>(Mesh + 0x11C, Vector3(item.FOVChangerValueX, item.FOVChangerValueY, item.FOVChangerValueZ)); //Class Engine.SceneComponent -> RelativeScale3D -> 0x134
}
}
if (item.shield_esp && Names2.find("AthenaSupplyDrop_Llama") != std::string::npos)
{
uintptr_t ItemRootComponent = read<uintptr_t>(LocalPawn + 0x138);
Vector3 ItemPosition = read<Vector3>(ItemRootComponent + 0x11C);
Vector3 LLamaPosition;
ProjectWorldToScreen(ItemPosition + LLamaPosition);
char dist[255];
sprintf(dist, E("Liama"));
DrawString(15, ItemPosition.x, ItemPosition.y, &Col.darkblue, true, true, dist);
}
if (item.npcbotcheck && Names2.find("BP_IOPlayerPawn_Base_C") != std::string::npos)
{
char dist[255];
sprintf(dist, E("[ NPC / BOT ]"));
DrawString(15, bottom.x, bottom.y, &ESPColorSkill, true, true, dist);
}
if (item.AimBone) {
if (aimbones == 0)
{
char dist[64];
sprintf_s(dist, "Current Aimbone: Head\n", ImGui::GetIO().Framerate);
ImGui::GetOverlayDrawList()->AddText(ImVec2(Width / 2, 70), ImGui::GetColorU32({ color.DarkRed[0], color.DarkRed[1], color.DarkRed[2], 4.0f }), dist);
}
if (aimbones == 1)
{
char dist[64];
sprintf_s(dist, "Current Aimbone: Chest\n", ImGui::GetIO().Framerate);
ImGui::GetOverlayDrawList()->AddText(ImVec2(Width / 2, 70), ImGui::GetColorU32({ color.DarkRed[0], color.DarkRed[1], color.DarkRed[2], 4.0f }), dist);
}
if (aimbones == 2)
{
char dist[64];
sprintf_s(dist, "Current Aimbone: Toes\n", ImGui::GetIO().Framerate);
ImGui::GetOverlayDrawList()->AddText(ImVec2(Width / 2, 70), ImGui::GetColorU32({ color.DarkRed[0], color.DarkRed[1], color.DarkRed[2], 4.0f }), dist);
}
}
if (item.Esp_box)
{
if (esp_mode == 0) {
if (visuals::outline)
{
DrawNormalBox(bottom.x - CornerWidth / 2 + 1, Head.y, CornerWidth, CornerHeight, menu::box_thick, &Col.red);
DrawNormalBox(bottom.x - CornerWidth / 2 - 1, Head.y, CornerWidth, CornerHeight, menu::box_thick, &Col.red);
DrawNormalBox(bottom.x - CornerWidth / 2, Head.y + 1, CornerWidth, CornerHeight, menu::box_thick, &Col.red);
DrawNormalBox(bottom.x - CornerWidth / 2, Head.y - 1, CornerWidth, CornerHeight, menu::box_thick, &Col.red);
}
DrawNormalBox(bottom.x - (CornerWidth / 2), Head.y, CornerWidth, CornerHeight, menu::box_thick, &ESPColorSkill);
}
if (esp_mode == 1) {
if (visuals::outline)
{
DrawCornerBox(bottom.x - CornerWidth / 2 + 1, Head.y, CornerWidth, CornerHeight, menu::box_thick, &Col.red);
DrawCornerBox(bottom.x - CornerWidth / 2 - 1, Head.y, CornerWidth, CornerHeight, menu::box_thick, &Col.red);
DrawCornerBox(bottom.x - CornerWidth / 2, Head.y + 1, CornerWidth, CornerHeight, menu::box_thick, &Col.red);
DrawCornerBox(bottom.x - CornerWidth / 2, Head.y - 1, CornerWidth, CornerHeight, menu::box_thick, &Col.red);
}
DrawCornerBox(bottom.x - (CornerWidth / 2), Head.y, CornerWidth, CornerHeight, menu::box_thick, &ESPColorSkill);
}
if (esp_mode == 2) {
DrawFilledRect(bottom.x - (CornerWidth / 2), Head.y, CornerWidth, CornerHeight, &Col.filled);
DrawNormalBox(bottom.x - (CornerWidth / 2), Head.y, CornerWidth, CornerHeight, menu::box_thick, &ESPColorSkill);
if (visuals::outline)
{
DrawNormalBox(bottom.x - CornerWidth / 2 + 1, Head.y, CornerWidth, CornerHeight, menu::box_thick, &Col.darkblue);
DrawNormalBox(bottom.x - CornerWidth / 2 - 1, Head.y, CornerWidth, CornerHeight, menu::box_thick, &Col.darkblue);
DrawNormalBox(bottom.x - CornerWidth / 2, Head.y + 1, CornerWidth, CornerHeight, menu::box_thick, &Col.darkblue);
DrawNormalBox(bottom.x - CornerWidth / 2, Head.y - 1, CornerWidth, CornerHeight, menu::box_thick, &Col.darkblue);
}
}
}
if (item.Esp_line) {
DrawLine((Width / 2), Height, player_screen.x, player_screen.y, &ESPColorSkill, item.Thickness);
}
if (item.Distance_Esp) {
char buf[256];
sprintf(buf, E("Entity"), (int)distance);
DrawString(14, Head.x, Head.y - 10, &Col.darkblue, true, true, buf);
}
if (item.skeleton) {
DrawSkeleton(entity.mesh);
}
if (item.playerfly)
{
FZiplinePawnState ZiplinePawnState = read<FZiplinePawnState>(LocalPawn + 0x18B0);
ZiplinePawnState.bIsZiplining = true;
ZiplinePawnState.AuthoritativeValue = 360.f;
write<FZiplinePawnState>(LocalPawn + 0x18B0, ZiplinePawnState);
}
if (item.freezeplayer) {
write<float>(LocalPawn + 0x9C, 0.0f);
}
else
{
write<float>(LocalPawn + 0x9C, 1.00f);
}
if (item.aiminair) {
write<bool>(LocalPawn + 0x3E81, true);
}
if (item.gliderexploit && GetAsyncKeyState(VK_CONTROL)) {
write<float>(LocalPawn + 0x14DE, 0.02f); //bIsParachuteOpen Offset
write<float>(LocalPawn + 0x14DD, 0.02f); //bIsSkydiving Offset
}
if (item.Rapidfire) {
float a = 0;
float b = 0;
uintptr_t CurrentWeapon = read<uintptr_t>(LocalPawn + 0x5F8); //CurrentWeapon Offset
if (CurrentWeapon) {
a = read<float>(CurrentWeapon + 0x9EC); //LastFireTime Offset
b = read<float>(CurrentWeapon + 0x9F0); //LastFireTimeVerified Offset
write<float>(CurrentWeapon + 0x9EC, a + b - item.RapidFireValue); //LastFireTime Offset
}
}
if (item.instant_res) {
write<float>(LocalPawn + 0x3788, item.InstantReviveValue); //ReviveFromDBNOTime Offset
}
if (item.Aimbot) {
auto dx = aimbotspot.x - (Width / 2);
auto dy = aimbotspot.y - (Height / 2);
auto dist = sqrtf(dx * dx + dy * dy) / 50.0f;
if (dist < item.AimFOV && dist < closestDistance) {
closestDistance = dist;
closestPawn = entity.Actor;
}
}
}
}
if (item.Aimbot) {
if (closestPawn != 0) {
if (item.Aimbot && closestPawn && GetAsyncKeyState(hotkeys::aimkey)) {
AimAt(closestPawn);
if (item.Dynamic_aim) {
item.boneswitch += 1;
if (item.boneswitch == 700) {
item.boneswitch = 0;
}
if (item.boneswitch == 0) {
item.hitboxpos = 0;
}
else if (item.boneswitch == 50) {
item.hitboxpos = 1;
}
else if (item.boneswitch == 100) {
}
else if (item.boneswitch == 150) {
item.hitboxpos = 3;
}
else if (item.boneswitch == 200) {
item.hitboxpos = 4;
}
else if (item.boneswitch == 250) {
item.hitboxpos = 5;
}
else if (item.boneswitch == 300) {
item.hitboxpos = 6;
}
else if (item.boneswitch == 350) {
item.hitboxpos = 7;
}
else if (item.boneswitch == 400) {
item.hitboxpos = 6;
}
else if (item.boneswitch == 450) {
item.hitboxpos = 5;
}
else if (item.boneswitch == 500) {
item.hitboxpos = 4;
}
else if (item.boneswitch == 550) {
item.hitboxpos = 3;
}
else if (item.boneswitch == 600) {
item.hitboxpos = 2;
}
else if (item.boneswitch == 650) {
item.hitboxpos = 1;
}
}
}
else {
isaimbotting = false;
AimAt2(closestPawn);
}
}
}
}
void GetKey() {
if (item.hitboxpos == 0) {
item.hitbox = 98;
}
else if (item.hitboxpos == 1) {
item.hitbox = 66;
}
else if (item.hitboxpos == 2) {
item.hitbox = 5;
}
else if (item.hitboxpos == 3) {
item.hitbox = 2;
}
DrawESP();
}
static int Tab = 0;
void shortcurts()
{
if (Key.IsKeyPushing(VK_INSERT))
{
menu_key = !menu_key;
Sleep(200);
}
}
void render() {
ImGuiIO& io = ImGui::GetIO();
io.IniFilename = nullptr;
ImGui_ImplDX9_NewFrame();
ImGui_ImplWin32_NewFrame();
ImGui::NewFrame();
DrawESP();
if (item.drawfov_circle) {
DrawCircle(Width / 2, Height / 2, float(item.AimFOV), &Col.red, 100);
}
if (item.cross_hair) {
ImGui::GetOverlayDrawList()->AddCircleFilled(ImVec2(ScreenCenterX, ScreenCenterY), 5, ImColor(255, 255, 255, 255));
}
shortcurts();
if (menu_key)
{
ImVec4* colors = ImGui::GetStyle().Colors;
colors[ImGuiCol_Text] = ImVec4(1.00f, 1.00f, 1.00f, 1.00f);
colors[ImGuiCol_TextDisabled] = ImVec4(0.50f, 0.50f, 0.50f, 1.00f);
colors[ImGuiCol_WindowBg] = ImVec4(0.10f, 0.10f, 0.10f, 1.00f);
colors[ImGuiCol_ChildBg] = ImVec4(0.00f, 0.00f, 0.00f, 0.00f);
colors[ImGuiCol_PopupBg] = ImVec4(0.19f, 0.19f, 0.19f, 0.92f);
colors[ImGuiCol_Border] = ImVec4(0.19f, 0.19f, 0.19f, 0.29f);
colors[ImGuiCol_BorderShadow] = ImVec4(0.00f, 0.00f, 0.00f, 0.24f);
colors[ImGuiCol_FrameBg] = ImVec4(0.05f, 0.05f, 0.05f, 0.54f);
colors[ImGuiCol_FrameBgHovered] = ImVec4(0.19f, 0.19f, 0.19f, 0.54f);
colors[ImGuiCol_FrameBgActive] = ImVec4(0.20f, 0.22f, 0.23f, 1.00f);
colors[ImGuiCol_TitleBg] = ImVec4(0.00f, 0.00f, 0.00f, 1.00f);
colors[ImGuiCol_TitleBgActive] = ImVec4(0.06f, 0.06f, 0.06f, 1.00f);
colors[ImGuiCol_TitleBgCollapsed] = ImVec4(0.00f, 0.00f, 0.00f, 1.00f);
colors[ImGuiCol_MenuBarBg] = ImVec4(0.14f, 0.14f, 0.14f, 1.00f);
colors[ImGuiCol_ScrollbarBg] = ImVec4(0.05f, 0.05f, 0.05f, 0.54f);
colors[ImGuiCol_ScrollbarGrab] = ImVec4(0.34f, 0.34f, 0.34f, 0.54f);
colors[ImGuiCol_ScrollbarGrabHovered] = ImVec4(0.40f, 0.40f, 0.40f, 0.54f);
colors[ImGuiCol_ScrollbarGrabActive] = ImVec4(0.56f, 0.56f, 0.56f, 0.54f);
colors[ImGuiCol_CheckMark] = ImVec4(0.33f, 0.67f, 0.86f, 1.00f);
colors[ImGuiCol_SliderGrab] = ImVec4(0.34f, 0.34f, 0.34f, 0.54f);
colors[ImGuiCol_SliderGrabActive] = ImVec4(0.56f, 0.56f, 0.56f, 0.54f);
colors[ImGuiCol_Button] = ImVec4(0.05f, 0.05f, 0.05f, 0.54f);
colors[ImGuiCol_ButtonHovered] = ImVec4(0.19f, 0.19f, 0.19f, 0.54f);
colors[ImGuiCol_ButtonActive] = ImVec4(0.20f, 0.22f, 0.23f, 1.00f);
colors[ImGuiCol_Header] = ImVec4(0.00f, 0.00f, 0.00f, 0.52f);
colors[ImGuiCol_HeaderHovered] = ImVec4(0.00f, 0.00f, 0.00f, 0.36f);
colors[ImGuiCol_HeaderActive] = ImVec4(0.20f, 0.22f, 0.23f, 0.33f);
colors[ImGuiCol_Separator] = ImVec4(0.28f, 0.28f, 0.28f, 0.29f);
colors[ImGuiCol_SeparatorHovered] = ImVec4(0.44f, 0.44f, 0.44f, 0.29f);
colors[ImGuiCol_SeparatorActive] = ImVec4(0.40f, 0.44f, 0.47f, 1.00f);
colors[ImGuiCol_ResizeGrip] = ImVec4(0.28f, 0.28f, 0.28f, 0.29f);
colors[ImGuiCol_ResizeGripHovered] = ImVec4(0.44f, 0.44f, 0.44f, 0.29f);
colors[ImGuiCol_ResizeGripActive] = ImVec4(0.40f, 0.44f, 0.47f, 1.00f);
colors[ImGuiCol_Tab] = ImVec4(0.00f, 0.00f, 0.00f, 0.52f);
colors[ImGuiCol_TabHovered] = ImVec4(0.14f, 0.14f, 0.14f, 1.00f);
colors[ImGuiCol_TabActive] = ImVec4(0.20f, 0.20f, 0.20f, 0.36f);
colors[ImGuiCol_TabUnfocused] = ImVec4(0.00f, 0.00f, 0.00f, 0.52f);
colors[ImGuiCol_TabUnfocusedActive] = ImVec4(0.14f, 0.14f, 0.14f, 1.00f);
colors[ImGuiCol_PlotLines] = ImVec4(1.00f, 0.00f, 0.00f, 1.00f);
colors[ImGuiCol_PlotLinesHovered] = ImVec4(1.00f, 0.00f, 0.00f, 1.00f);
colors[ImGuiCol_PlotHistogram] = ImVec4(1.00f, 0.00f, 0.00f, 1.00f);
colors[ImGuiCol_PlotHistogramHovered] = ImVec4(1.00f, 0.00f, 0.00f, 1.00f);
colors[ImGuiCol_TextSelectedBg] = ImVec4(0.20f, 0.22f, 0.23f, 1.00f);
colors[ImGuiCol_DragDropTarget] = ImVec4(0.33f, 0.67f, 0.86f, 1.00f);
colors[ImGuiCol_NavHighlight] = ImVec4(1.00f, 0.00f, 0.00f, 1.00f);
colors[ImGuiCol_NavWindowingHighlight] = ImVec4(1.00f, 0.00f, 0.00f, 0.70f);
colors[ImGuiCol_NavWindowingDimBg] = ImVec4(1.00f, 0.00f, 0.00f, 0.20f);
colors[ImGuiCol_ModalWindowDimBg] = ImVec4(1.00f, 0.00f, 0.00f, 0.35f);
ImGuiStyle& style = ImGui::GetStyle();
style.WindowPadding = ImVec2(8.00f, 8.00f);
style.FramePadding = ImVec2(5.00f, 2.00f);
style.ItemSpacing = ImVec2(6.00f, 6.00f);
style.ItemInnerSpacing = ImVec2(6.00f, 6.00f);
style.TouchExtraPadding = ImVec2(0.00f, 0.00f);
style.IndentSpacing = 25;
style.ScrollbarSize = 15;
style.GrabMinSize = 10;
style.WindowBorderSize = 1;
style.ChildBorderSize = 1;
style.PopupBorderSize = 1;
style.FrameBorderSize = 1;
style.TabBorderSize = 1;
style.WindowRounding = 7;
style.ChildRounding = 4;
style.FrameRounding = 3;
style.PopupRounding = 4;
style.ScrollbarRounding = 9;
style.GrabRounding = 3;
style.TabRounding = 4;
if (ImGui::Begin(("##lol"), 0, ImGuiWindowFlags_::ImGuiWindowFlags_NoCollapse | ImGuiWindowFlags_::ImGuiWindowFlags_NoResize | ImGuiWindowFlags_NoScrollbar | ImGuiWindowFlags_NoTitleBar))
ImGui::SetNextWindowSize(ImVec2({ 300, 675 }), ImGuiSetCond_FirstUseEver); {
ImGui::SetWindowSize({ 500, 350 });
static int MenuTab;
ImGui::SetWindowFontScale(1.3f);
ImGui::Columns(2);
ImGui::SetColumnOffset(1, 130);
{
if (ImGui::Button("Aimbot", ImVec2(120 - 5, 30)))
{
MenuTab = 0;
}
if (ImGui::ButtonEx("Visuals", ImVec2(120 - 5, 30)))
{
MenuTab = 1;
}
if (ImGui::ButtonEx("Exploits", ImVec2(120 - 5, 30)))
{
MenuTab = 2;
}
ImGui::SetCursorPosY(ImGui::GetWindowHeight() - 30);
ImGui::Text("", 50, 1, false);
}
ImGui::NextColumn();
{
if (MenuTab == 0) {
ImGui::Checkbox(("Enable Aimbot"), &item.Aimbot);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Memory Aimbot - Lock on"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Draw FOV"), &item.drawfov_circle);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Makes the FOV seeable"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Draw Crosshair"), &item.cross_hair);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Draws a crosshair"));
ImGui::EndTooltip();
}
ImGui::Spacing();
ImGui::Text((E("Aim Key: ")));
HotkeyButton(hotkeys::aimkey, ChangeKey, keystatus);
ImGui::Spacing();
ImGui::SliderFloat(("FOV"), &item.AimFOV, 20, 500);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Field of view for aimbot"));
ImGui::EndTooltip();
}
ImGui::SliderFloat(("Smoothing"), &item.Aim_SpeedX, 1, 30);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text((" Value of smoothing"));
ImGui::EndTooltip();
}
}
if (MenuTab == 1) {
ImGui::Checkbox("Box", &item.Esp_box);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Simple 2D Box, can choose types"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("AimLine"), &item.Locl_line);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("It will make a line to the player"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Skeleton"), &item.skeleton);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Simple Skeleton ESP"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Snaplines"), &item.Esp_line);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Lines to the enemy's"));
ImGui::EndTooltip();
}
ImGui::Checkbox("Chams", &item.chams);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("This will draw blue chams!"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Aimbone"), &item.AimBone);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Changes Aimbone"));
ImGui::EndTooltip();
}
if (item.Esp_box) {
ImGui::Combo(("Box"), &esp_mode, esp_modes, sizeof(esp_modes) / sizeof(*esp_modes));
}
if (item.AimBone) {
ImGui::Combo(("Aimbone"), &aimbones, aimbone, sizeof(aimbone) / sizeof(*aimbone));
}
}
if (MenuTab == 2) {
ImGui::Spacing();
ImGui::Checkbox(("PlayerFly"), &item.playerfly);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Spam jump to fly!"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Insta-Res"), &item.instant_res);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Hold E!"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Ads-Air"), &item.aiminair);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Aim in why jumping!"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Enable Rapid-Fire"), &item.Rapidfire);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Shoots fast!"));
ImGui::EndTooltip();
}
ImGui::Checkbox(("Enable FOV-Changer"), &item.FOVChanger);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Changes Your FOV"));
ImGui::EndTooltip();
}
ImGui::SliderFloat("FOV X", &item.FOVChangerValueX, 0, 10);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Changes X Value Of FOV"));
ImGui::EndTooltip();
}
ImGui::SliderFloat("FOV Y", &item.FOVChangerValueY, 0, 10);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Changes Y Value Of FOV"));
ImGui::EndTooltip();
}
ImGui::SliderFloat("FOV Z", &item.FOVChangerValueZ, 0, 10);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("Changes Z Value Of FOV"));
ImGui::EndTooltip();
}
ImGui::SliderFloat("Res Speed", &item.InstantReviveValue, 0, 10);
ImGui::SameLine();
ImGui::TextColored(ImColor(255, 255, 255, 255), ("[?]"));
if (ImGui::IsItemHovered()) {
ImGui::BeginTooltip();
ImGui::Text(("How fast you res!"));
ImGui::EndTooltip();
}
}
if (MenuTab == 3) {
ImGui::ColorPicker4(("Fov Circle"), item.DrawFOVCircle, ImGuiColorEditFlags_NoInputs);
ImGui::ColorPicker4(("Box Esp"), item.Espbox, ImGuiColorEditFlags_NoInputs);
ImGui::ColorPicker4(("Corner Box"), item.BoxCornerESP, ImGuiColorEditFlags_NoInputs);
ImGui::ColorPicker4(("Snapline Esp"), item.LineESP, ImGuiColorEditFlags_NoInputs);
}
if (MenuTab == 4) {
ImGui::SetCursorPos({ 175.f,55.f });
ImGui::Text(("Intended For: Bytes.AC"));
ImGui::SetCursorPos({ 175.f,75.f });
ImGui::Text(("Overlay Frametime: %.1f FPS"), ImGui::GetIO().Framerate);
ImGui::SetCursorPos({ 175.f,95.f });
ImGui::TextColored(ImColor(0, 255, 55), "Status: Undetected");
ImGui::SetCursorPos({ 175.f,115.f });
ImGui::Text(("Build Version: 2.10"));
ImGui::SetCursorPos({ 175.f,135.f });
ImGui::Text(("Developer(s): Twixy#1337, wish#1111, yully"));
}
}
}
ImGui::End();
ImGui::GetIO().MouseDrawCursor = 1;
}
else {
ImGui::GetIO().MouseDrawCursor = 0;
}
ImGui::EndFrame();
p_Device->SetRenderState(D3DRS_ZENABLE, false);
p_Device->SetRenderState(D3DRS_ALPHABLENDENABLE, false);
p_Device->SetRenderState(D3DRS_SCISSORTESTENABLE, false);
p_Device->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_ARGB(0, 0, 0, 0), 1.0f, 0);
if (p_Device->BeginScene() >= 0)
{
ImGui::Render();
ImGui_ImplDX9_RenderDrawData(ImGui::GetDrawData());
p_Device->EndScene();
}
HRESULT result = p_Device->Present(NULL, NULL, NULL, NULL);
if (result == D3DERR_DEVICELOST && p_Device->TestCooperativeLevel() == D3DERR_DEVICENOTRESET)
{
ImGui_ImplDX9_InvalidateDeviceObjects();
p_Device->Reset(&p_Params);
ImGui_ImplDX9_CreateDeviceObjects();
}
}
WPARAM MainLoop()
{
static RECT old_rc;
ZeroMemory(&Message, sizeof(MSG));
while (Message.message != WM_QUIT)
{
if (PeekMessage(&Message, MyWnd, 0, 0, PM_REMOVE))
{
TranslateMessage(&Message);
DispatchMessage(&Message);
}
HWND hwnd_active = GetForegroundWindow();
if (GetAsyncKeyState(0x23) & 1)
exit(8);
if (hwnd_active == GameWnd) {
HWND hwndtest = GetWindow(hwnd_active, GW_HWNDPREV);
SetWindowPos(MyWnd, hwndtest, 0, 0, 0, 0, SWP_NOMOVE | SWP_NOSIZE);
}
RECT rc;
POINT xy;
ZeroMemory(&rc, sizeof(RECT));
ZeroMemory(&xy, sizeof(POINT));
GetClientRect(GameWnd, &rc);
ClientToScreen(GameWnd, &xy);
rc.left = xy.x;
rc.top = xy.y;
ImGuiIO& io = ImGui::GetIO();
io.ImeWindowHandle = GameWnd;
io.DeltaTime = 1.0f / 60.0f;
POINT p;
GetCursorPos(&p);
io.MousePos.x = p.x - xy.x;
io.MousePos.y = p.y - xy.y;
if (GetAsyncKeyState(0x1)) {
io.MouseDown[0] = true;
io.MouseClicked[0] = true;
io.MouseClickedPos[0].x = io.MousePos.x;
io.MouseClickedPos[0].y = io.MousePos.y;
}
else
io.MouseDown[0] = false;
if (rc.left != old_rc.left || rc.right != old_rc.right || rc.top != old_rc.top || rc.bottom != old_rc.bottom)
{
old_rc = rc;
Width = rc.right;
Height = rc.bottom;
p_Params.BackBufferWidth = Width;
p_Params.BackBufferHeight = Height;
SetWindowPos(MyWnd, (HWND)0, xy.x, xy.y, Width, Height, SWP_NOREDRAW);
p_Device->Reset(&p_Params);
}
render();
}
ImGui_ImplDX9_Shutdown();
ImGui_ImplWin32_Shutdown();
ImGui::DestroyContext();
CleanuoD3D();
DestroyWindow(MyWnd);
return Message.wParam;
}
LRESULT CALLBACK WinProc(HWND hWnd, UINT Message, WPARAM wParam, LPARAM lParam)
{
if (ImGui_ImplWin32_WndProcHandler(hWnd, Message, wParam, lParam))
return true;
switch (Message)
{
case WM_DESTROY:
CleanuoD3D();
PostQuitMessage(0);
exit(4);
break;
case WM_SIZE:
if (p_Device != NULL && wParam != SIZE_MINIMIZED)
{
ImGui_ImplDX9_InvalidateDeviceObjects();
p_Params.BackBufferWidth = LOWORD(lParam);
p_Params.BackBufferHeight = HIWORD(lParam);
HRESULT hr = p_Device->Reset(&p_Params);
if (hr == D3DERR_INVALIDCALL)
IM_ASSERT(0);
ImGui_ImplDX9_CreateDeviceObjects();
}
break;
default:
return DefWindowProc(hWnd, Message, wParam, lParam);
break;
}
return 0;
}
void CleanuoD3D()
{
if (p_Device != NULL)
{
p_Device->EndScene();
p_Device->Release();
}
if (p_Object != NULL)
{
p_Object->Release();
}
}
void SetWindowToTarget()
{
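// Busy loop: continuously re-acquires the game window and keeps the overlay aligned with it.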
while (true)
{
GameWnd = get_process_wnd(sdk::process_id);
if (GameWnd)
{
ZeroMemory(&GameRect, sizeof(GameRect));
GetWindowRect(GameWnd, &GameRect);
Width = GameRect.right - GameRect.left;
Height = GameRect.bottom - GameRect.top;
DWORD dwStyle = GetWindowLong(GameWnd, GWL_STYLE);
if (dwStyle & WS_BORDER)
{
GameRect.top += 32;
Height -= 39;
}
ScreenCenterX = Width / 2;
ScreenCenterY = Height / 2;
MoveWindow(MyWnd, GameRect.left, GameRect.top, Width, Height, true);
}
}
}
int main() {
//std::thread anti(debug);
//SetConsoleTitleA(" Vality ");
//printf((" Connecting "));
//Sleep(1000);
//randomize();
if (driver->Init(FALSE)) {
printf(("Success!\n"));
Sleep(1000);
driver->Attach((L"FortniteClient-Win64-Shipping.exe"));
SetupWindow();
DirectXInit(MyWnd);
sdk::process_id = driver->GetProcessId((L"FortniteClient-Win64-Shipping.exe"));
sdk::module_base = driver->GetModuleBase((L"FortniteClient-Win64-Shipping.exe"));
printf(("FortniteClient-Win64-Shipping.exe :0x%llX\n"), sdk::module_base);
std::cout << GetNameFromFName;
HANDLE handle = CreateThread(nullptr, NULL, reinterpret_cast<LPTHREAD_START_ROUTINE>(cache), nullptr, NULL, nullptr);
//CloseHandle(handle);
MainLoop();
return 0;
}
printf(("Failed!\n"));
system(("pause"));
return 1;
}
<|start_filename|>Menu Look/main.h<|end_filename|>
#pragma once
#include <vector>
static const char* esp_modes[]{ "Box Normal", "Box Cornered", "Box Filled", "3d Box" };
static const char* aimbone[]{ "Head", "Chest", "Toes" };
static int esp_mode = 0;
static int aimbones = 0;
static int radarstyle;
struct {
// Basic colors: ========================================================
float Black[3];
float RGBRed[3] = { 1.0f, 0.0f, 0.0f };
float RGBYelllow[3] = { 1.0f, 1.0f, 0.0f };
float RGBGreen[3] = { 0.0f, 1.0f, 0.0f };
float RGBBlue[3] = { 0.0f, 0.0f, 1.0f };
float CMYKRed[3] = { 0.92f, 0.10f, 0.14f };
float CMYKYellow[3] = { 1.0f, 0.94f, 0.0f };
float CMYKGreen[3] = { 0.0f, 0.65f, 0.31f };
float CMYKBlue[3] = { 0.18f, 0.19f, 0.57f };
float PastelRed[3] = { 0.96f, 0.58f, 0.47f };
float PastelRedOrange[3] = { 0.97f, 0.67f, 0.50f };
float PastelYellowOrange[3] = { 0.99f, 0.77f, 0.53f };
float PastelYellow[3] = { 1.0f, 0.96f, 0.6f };
float PastelPeaGreen[3] = { 0.76f, 0.87f, 0.60f };
float PastelYellowGreen[3] = { 0.63f, 0.82f, 0.61f };
float PastelGreen[3] = { 0.50f, 0.79f, 0.61f };
float PastelGreenCyan[3] = { 0.47f, 0.8f, 0.78f };
float PastelCyan[3] = { 0.42f, 0.81f, 0.96f };
float PastelCyanBlue[3] = { 0.49f, 0.65f, 0.85f };
float PastelBlue[3] = { 0.51f, 0.57f, 0.79f };
float PastelBlueViolet[3] = { 0.52f, 0.50f, 0.74f };
float PastelViolet[3] = { 0.63f, 0.52f, 0.74f };
float PastelVioletMagenta[3] = { 0.74f, 0.54f, 0.74f };
float PastelMagenta[3] = { 0.95f, 0.60f, 0.75f };
float PastelMagentaRed[3] = { 0.96f, 0.59f, 0.61f };
float LightRed[3] = { 0.94f, 0.42f, 0.30f };
float LightRedOrange[3] = { 0.96f, 0.55f, 0.33f };
float LightYellowOrange[3] = { 0.98f, 0.68f, 0.36f };
float LightYellow[3] = { 1.0f, 0.96f, 0.40f };
float LightPeaGreen[3] = { 0.67f, 0.82f, 0.45f };
float LightYellowGreen[3] = { 0.48f, 0.77f, 0.46f };
float LightGreen[3] = { 0.23f, 0.72f, 0.47f };
float LightGreenCyan[3] = { 0.10f, 0.73f, 0.70f };
float LightCyan[3] = { 0.0f, 0.74f, 0.95f };
float LightCyanBlue[3] = { 0.26f, 0.54f, 0.79f };
float LightBlue[3] = { 0.33f, 0.45f, 0.72f };
float LightBlueViolet[3] = { 0.37f, 0.36f, 0.65f };
float LightViolet[3] = { 0.52f, 0.37f, 0.65f };
float LightVioletMagenta[3] = { 0.65f, 0.39f, 0.65f };
float LightMagenta[3] = { 0.94f, 0.43f, 0.66f };
float LightMagentaRed[3] = { 0.94f, 0.42f, 0.49f };
float Red[3] = { 0.92f, 0.10f, 0.14f };
float RedOrange[3] = { 0.94f, 0.39f, 0.13f };
float YellowOrange[3] = { 0.96f, 0.58f, 0.11f };
float Yellow[3] = { 1.0f, 0.94f, 0.0f };
float PeaGreen[3] = { 0.55f, 0.77f, 0.24f };
float YellowGreen[3] = { 0.22f, 0.70f, 0.29f };
float Green[3] = { 0.0f, 0.65f, 0.31f };
float GreenCyan[3] = { 0.0f, 0.66f, 0.61f };
float Cyan[3] = { 0.0f, 0.68f, 0.93f };
float CyanBlue[3] = { 0.0f, 0.44f, 0.34f };
float Blue[3] = { 0.0f, 0.32f, 0.65f };
float BlueViolet[3] = { 0.19f, 0.19f, 0.57f };
float Violet[3] = { 0.18f, 0.19f, 0.57f };
float VioletMagenta[3] = { 0.57f, 0.15f, 0.56f };
float Magenta[3] = { 0.92f, 0.0f, 0.54f };
float MagentaRed[3] = { 0.92f, 0.07f, 0.35f };
float DarkRed[3] = { 0.61f, 0.04f, 0.05f };
float DarkROrange[3] = { 0.62f, 0.25f, 0.05f };
float DarkYellowOrange[3] = { 0.53f, 0.38f, 0.03f };
float DarkYellow[3] = { 0.67f, 0.62f, 0.0f };
float DarkPeaGreen[3] = { 0.34f, 0.52f, 0.15f };
float DarkYellowGreen[3] = { 0.09f, 0.48f, 0.18f };
float DarkGreen[3] = { 0.0f, 0.44f, 0.21f };
float DarkGreenCyan[3] = { 0.0f, 0.45f, 0.41f };
float DarkCyan[3] = { 0.0f, 0.46f, 0.63f };
float DarkCyanBlue[3] = { 0.0f, 0.29f, 0.50f };
float DarkBlue[3] = { 0.0f, 0.20f, 0.44f };
float DarkBlueViolet[3] = { 0.10f, 0.07f, 0.39f };
float DarkViolet[3] = { 0.26f, 0.05f, 0.38f };
float DarkVioletMagenta[3] = { 0.38f, 0.01f, 0.37f };
float DarkMagenta[3] = { 0.61f, 0.0f, 0.36f };
float DarkMagentaRed[3] = { 0.61f, 0.0f, 0.22f };
float DarkerRed[3] = { 0.47f, 0.0f, 0.0f };
float DarkerROrange[3] = { 0.48f, 0.18f, 0.0f };
float DarkerYellowOrange[3] = { 0.49f, 0.28f, 0.0f };
float DarkerYellow[3] = { 0.50f, 0.48f, 0.0f };
float DarkerPeaGreen[3] = { 0.25f, 0.4f, 0.09f };
float DarkerYellowGreen[3] = { 0.0f, 0.36f, 0.12f };
float DarkerGreen[3] = { 0.0f, 0.34f, 0.14f };
float DarkerGreenCyan[3] = { 0.0f, 0.34f, 0.32f };
float DarkerCyan[3] = { 0.0f, 0.35f, 0.49f };
float DarkerCyanBlue[3] = { 0.0f, 0.21f, 0.38f };
float DarkerBlue[3] = { 0.0f, 0.12f, 0.34f };
float DarkerBlueViolet[3] = { 0.05f, 0.0f, 0.29f };
float DarkerViolet[3] = { 0.19f, 0.0f, 0.29f };
float DarkerVioletMagenta[3] = { 0.29f, 0.0f, 0.28f };
float DarkerMagenta[3] = { 0.48f, 0.0f, 0.27f };
float DarkerMagentaRed[3] = { 0.47f, 0.27f, 0.14f };
float PaleCoolBrown[3] = { 0.78f, 0.69f, 0.61f };
float LightCoolBrown[3] = { 0.6f, 0.52f, 0.45f };
float MiumCoolBrown[3] = { 0.45f, 0.38f, 0.34f };
float DarkCoolBrown[3] = { 0.32f, 0.27f, 0.25f };
float DarkerCoolBrown[3] = { 0.21f, 0.18f, 0.17f };
float PaleWarmBrown[3] = { 0.77f, 0.61f, 0.43f };
float LightWarmBrown[3] = { 0.65f, 0.48f, 0.32f };
float MiumWarmBrown[3] = { 0.54f, 0.38f, 0.22f };
float DarkWarmBrown[3] = { 0.45f, 0.29f, 0.14f };
float DarkerWarmBrown[3] = { 0.37f, 0.22f, 0.07f };
} color;
struct {
// item set ==============================================================
float aimkey;
float hitbox;
float aimkeypos = 0;
float hitboxpos = 0;
float boneswitch = 1;
float Aim_SpeedX = 1;
float Aim_SpeedY = 1;
float Aim_SpeedZ = 1;
float VisDist = 290;
float AimFOV = 2.0f;
bool show_menu = false;
bool instant_res = false;
bool cross_hair = true;
bool Dynamic_aim = false;
bool drawfov_circle = true;
bool rainbow_colors = false;
bool shield_esp = false;
bool freezeplayer = false;
float rainbow_speed = 0.25f;
bool Aim_Prediction;
bool Rapidfire = false;
float AimDistance = 300;
float RapidFireValue = 1;
float InstantReviveValue = 10;
// enemy ==============================================================
bool Aimbot = true;
bool Esp_box = true;
bool AimBone = false;
bool playerfly = false;
bool esp_corner = false;
bool gliderexploit = false;
bool Distance_Esp = false;
bool skeleton = false;
bool aiminair = false;
bool Locl_line = false;
bool npcbotcheck = false;
bool Esp_line;
//enemy item color ==============================================================
float DrawFOVCircle[4] = { color.Black[0], color.Black[1], color.Black[2] };
float Espbox[3] = { color.RGBBlue[0], color.RGBBlue[1], color.RGBBlue[2] };
float BoxCornerESP[3] = { color.RGBBlue[0], color.RGBBlue[1], color.RGBBlue[2] };
float LineESP[3] = { color.Black[0], color.Black[1], color.Black[2] };
float CrossHair[3] = { color.Black[0], color.Black[1], color.Black[2] };
float LockLine[3] = { color.RGBRed[0], color.RGBRed[1], color.RGBRed[2] };
// item color set ==============================================================
float Thickness = 1.5f;
float Shape = 100.0f;
float Transparency = 0.8f;
//team ==============================================================
bool Team_Aimbot;
bool radar;
bool FOVChanger;
bool chams;
float FOVChangerValueX;
float FOVChangerValueY;
float FOVChangerValueZ;
bool VisableCheck;
} item;
struct FZiplinePawnState {
struct AFortAthenaZiplineBase* Zipline; // 0x00(0x08)
struct AFortAthenaZiplineBase* PreviousZipline; // 0x08(0x08)
struct UPrimitiveComponent* InteractComponent; // 0x10(0x08)
bool bIsZiplining; // 0x18(0x01)
bool bJumped; // 0x19(0x01)
bool bReachedEnd; // 0x1a(0x01)
char pad_1B[0x1]; // 0x1b(0x01)
int32_t AuthoritativeValue; // 0x1c(0x04)
struct sdk::structs::FVector SocketOffset; // 0x20(0x0c)
float TimeZipliningBegan; // 0x2c(0x04)
float TimeZipliningEndedFromJump; // 0x30(0x04)
char pad_34[0x4]; // 0x34(0x04)
};
<|start_filename|>D3DX9/d3dx9shape.h<|end_filename|>
///////////////////////////////////////////////////////////////////////////
//
// Copyright (C) Microsoft Corporation. All Rights Reserved.
//
// File: d3dx9shapes.h
// Content: D3DX simple shapes
//
///////////////////////////////////////////////////////////////////////////
#include "d3dx9.h"
#ifndef __D3DX9SHAPES_H__
#define __D3DX9SHAPES_H__
///////////////////////////////////////////////////////////////////////////
// Functions:
///////////////////////////////////////////////////////////////////////////
#ifdef __cplusplus
extern "C" {
#endif //__cplusplus
//-------------------------------------------------------------------------
// D3DXCreatePolygon:
// ------------------
// Creates a mesh containing an n-sided polygon. The polygon is centered
// at the origin.
//
// Parameters:
//
// pDevice The D3D device with which the mesh is going to be used.
// Length Length of each side.
// Sides Number of sides the polygon has. (Must be >= 3)
// ppMesh The mesh object which will be created
// ppAdjacency Returns a buffer containing adjacency info. Can be NULL.
//-------------------------------------------------------------------------
HRESULT WINAPI
D3DXCreatePolygon(
LPDIRECT3DDEVICE9 pDevice,
FLOAT Length,
UINT Sides,
LPD3DXMESH* ppMesh,
LPD3DXBUFFER* ppAdjacency);
//-------------------------------------------------------------------------
// D3DXCreateBox:
// --------------
// Creates a mesh containing an axis-aligned box. The box is centered at
// the origin.
//
// Parameters:
//
// pDevice The D3D device with which the mesh is going to be used.
// Width Width of box (along X-axis)
// Height Height of box (along Y-axis)
// Depth Depth of box (along Z-axis)
// ppMesh The mesh object which will be created
// ppAdjacency Returns a buffer containing adjacency info. Can be NULL.
//-------------------------------------------------------------------------
HRESULT WINAPI
D3DXCreateBox(
LPDIRECT3DDEVICE9 pDevice,
FLOAT Width,
FLOAT Height,
FLOAT Depth,
LPD3DXMESH* ppMesh,
LPD3DXBUFFER* ppAdjacency);
//-------------------------------------------------------------------------
// D3DXCreateCylinder:
// -------------------
// Creates a mesh containing a cylinder. The generated cylinder is
// centered at the origin, and its axis is aligned with the Z-axis.
//
// Parameters:
//
// pDevice The D3D device with which the mesh is going to be used.
// Radius1 Radius at -Z end (should be >= 0.0f)
// Radius2 Radius at +Z end (should be >= 0.0f)
// Length Length of cylinder (along Z-axis)
// Slices Number of slices about the main axis
// Stacks Number of stacks along the main axis
// ppMesh The mesh object which will be created
// ppAdjacency Returns a buffer containing adjacency info. Can be NULL.
//-------------------------------------------------------------------------
HRESULT WINAPI
D3DXCreateCylinder(
LPDIRECT3DDEVICE9 pDevice,
FLOAT Radius1,
FLOAT Radius2,
FLOAT Length,
UINT Slices,
UINT Stacks,
LPD3DXMESH* ppMesh,
LPD3DXBUFFER* ppAdjacency);
//-------------------------------------------------------------------------
// D3DXCreateSphere:
// -----------------
// Creates a mesh containing a sphere. The sphere is centered at the
// origin.
//
// Parameters:
//
// pDevice The D3D device with which the mesh is going to be used.
// Radius Radius of the sphere (should be >= 0.0f)
// Slices Number of slices about the main axis
// Stacks Number of stacks along the main axis
// ppMesh The mesh object which will be created
// ppAdjacency Returns a buffer containing adjacency info. Can be NULL.
//-------------------------------------------------------------------------
HRESULT WINAPI
D3DXCreateSphere(
LPDIRECT3DDEVICE9 pDevice,
FLOAT Radius,
UINT Slices,
UINT Stacks,
LPD3DXMESH* ppMesh,
LPD3DXBUFFER* ppAdjacency);
//-------------------------------------------------------------------------
// D3DXCreateTorus:
// ----------------
// Creates a mesh containing a torus. The generated torus is centered at
// the origin, and its axis is aligned with the Z-axis.
//
// Parameters:
//
// pDevice The D3D device with which the mesh is going to be used.
// InnerRadius Inner radius of the torus (should be >= 0.0f)
// OuterRadius Outer radius of the torus (should be >= 0.0f)
// Sides Number of sides in a cross-section (must be >= 3)
// Rings Number of rings making up the torus (must be >= 3)
// ppMesh The mesh object which will be created
// ppAdjacency Returns a buffer containing adjacency info. Can be NULL.
//-------------------------------------------------------------------------
HRESULT WINAPI
D3DXCreateTorus(
LPDIRECT3DDEVICE9 pDevice,
FLOAT InnerRadius,
FLOAT OuterRadius,
UINT Sides,
UINT Rings,
LPD3DXMESH* ppMesh,
LPD3DXBUFFER* ppAdjacency);
//-------------------------------------------------------------------------
// D3DXCreateTeapot:
// -----------------
// Creates a mesh containing a teapot.
//
// Parameters:
//
// pDevice The D3D device with which the mesh is going to be used.
// ppMesh The mesh object which will be created
// ppAdjacency Returns a buffer containing adjacency info. Can be NULL.
//-------------------------------------------------------------------------
HRESULT WINAPI
D3DXCreateTeapot(
LPDIRECT3DDEVICE9 pDevice,
LPD3DXMESH* ppMesh,
LPD3DXBUFFER* ppAdjacency);
//-------------------------------------------------------------------------
// D3DXCreateText:
// ---------------
// Creates a mesh containing the specified text using the font associated
// with the device context.
//
// Parameters:
//
// pDevice The D3D device with which the mesh is going to be used.
// hDC Device context, with desired font selected
// pText Text to generate
// Deviation Maximum chordal deviation from true font outlines
// Extrusion Amount to extrude text in -Z direction
// ppMesh The mesh object which will be created
// ppAdjacency Returns a buffer containing adjacency info. Can be NULL.
// pGlyphMetrics Address of buffer to receive glyph metric data (or NULL)
//-------------------------------------------------------------------------
HRESULT WINAPI
D3DXCreateTextA(
LPDIRECT3DDEVICE9 pDevice,
HDC hDC,
LPCSTR pText,
FLOAT Deviation,
FLOAT Extrusion,
LPD3DXMESH* ppMesh,
LPD3DXBUFFER* ppAdjacency,
LPGLYPHMETRICSFLOAT pGlyphMetrics);
HRESULT WINAPI
D3DXCreateTextW(
LPDIRECT3DDEVICE9 pDevice,
HDC hDC,
LPCWSTR pText,
FLOAT Deviation,
FLOAT Extrusion,
LPD3DXMESH* ppMesh,
LPD3DXBUFFER* ppAdjacency,
LPGLYPHMETRICSFLOAT pGlyphMetrics);
#ifdef UNICODE
#define D3DXCreateText D3DXCreateTextW
#else
#define D3DXCreateText D3DXCreateTextA
#endif
#ifdef __cplusplus
}
#endif //__cplusplus
#endif //__D3DX9SHAPES_H__
<|start_filename|>configuration/utils.h<|end_filename|>
#pragma once
#include <Windows.h>
#include <iostream>
#include <winternl.h>
#include <ntstatus.h>
#include <atomic>
#include <mutex>
#include <TlHelp32.h>
#include <d3d9.h>
#include <dwmapi.h>
#include <xmmintrin.h>
#pragma comment(lib, "ntdll.lib")
#pragma comment(lib, "dwmapi.lib")
typedef enum _KEY_VALUE_INFORMATION_CLASS {
KeyValueBasicInformation,
KeyValueFullInformation,
KeyValuePartialInformation,
KeyValueFullInformationAlign64,
KeyValuePartialInformationAlign64,
KeyValueLayerInformation,
MaxKeyValueInfoClass // MaxKeyValueInfoClass should always be the last enum
} KEY_VALUE_INFORMATION_CLASS;
typedef struct _KEY_VALUE_FULL_INFORMATION {
ULONG TitleIndex;
ULONG Type;
ULONG DataOffset;
ULONG DataLength;
ULONG NameLength;
WCHAR Name[1]; // Variable size
// Data[1]; // Variable size data not declared
} KEY_VALUE_FULL_INFORMATION, * PKEY_VALUE_FULL_INFORMATION;
#ifdef __cplusplus
extern "C++"
{
char _RTL_CONSTANT_STRING_type_check(const char* s);
char _RTL_CONSTANT_STRING_type_check(const WCHAR* s);
// __typeof would be desirable here instead of sizeof.
template <size_t N> class _RTL_CONSTANT_STRING_remove_const_template_class;
template <> class _RTL_CONSTANT_STRING_remove_const_template_class<sizeof(char)> { public: typedef char T; };
template <> class _RTL_CONSTANT_STRING_remove_const_template_class<sizeof(WCHAR)> { public: typedef WCHAR T; };
#define _RTL_CONSTANT_STRING_remove_const_macro(s) \
(const_cast<_RTL_CONSTANT_STRING_remove_const_template_class<sizeof((s)[0])>::T*>(s))
}
#else
char _RTL_CONSTANT_STRING_type_check(const void* s);
#define _RTL_CONSTANT_STRING_remove_const_macro(s) (s)
#endif
#define RTL_CONSTANT_STRING(s) \
{ \
sizeof( s ) - sizeof( (s)[0] ), \
sizeof( s ) / sizeof(_RTL_CONSTANT_STRING_type_check(s)), \
_RTL_CONSTANT_STRING_remove_const_macro(s) \
}
extern "C" {
NTSYSAPI
NTSTATUS
NTAPI
ZwQueryValueKey(
_In_ HANDLE KeyHandle,
_In_ PUNICODE_STRING ValueName,
_In_ KEY_VALUE_INFORMATION_CLASS KeyValueInformationClass,
_Out_writes_bytes_to_opt_(Length, *ResultLength) PVOID KeyValueInformation,
_In_ ULONG Length,
_Out_ PULONG ResultLength
);
NTSYSAPI
NTSTATUS
NTAPI
ZwClose(
_In_ HANDLE Handle
);
NTSYSAPI
NTSTATUS
NTAPI
ZwOpenKey(
_Out_ PHANDLE KeyHandle,
_In_ ACCESS_MASK DesiredAccess,
_In_ POBJECT_ATTRIBUTES ObjectAttributes
);
NTSYSAPI
NTSTATUS
NTAPI
ZwQueryValueKey(
_In_ HANDLE KeyHandle,
_In_ PUNICODE_STRING ValueName,
_In_ KEY_VALUE_INFORMATION_CLASS KeyValueInformationClass,
_Out_writes_bytes_to_opt_(Length, *ResultLength) PVOID KeyValueInformation,
_In_ ULONG Length,
_Out_ PULONG ResultLength
);
NTSYSAPI
NTSTATUS
NTAPI
ZwSetValueKey(
_In_ HANDLE KeyHandle,
_In_ PUNICODE_STRING ValueName,
_In_opt_ ULONG TitleIndex,
_In_ ULONG Type,
_In_reads_bytes_opt_(DataSize) PVOID Data,
_In_ ULONG DataSize
);
NTSYSAPI NTSTATUS ZwCreateKey(
PHANDLE KeyHandle,
ACCESS_MASK DesiredAccess,
POBJECT_ATTRIBUTES ObjectAttributes,
ULONG TitleIndex,
PUNICODE_STRING Class,
ULONG CreateOptions,
PULONG Disposition
);
}
namespace RegistryUtils
{
__forceinline ULONG GetKeyInfoSize(HANDLE hKey, PUNICODE_STRING Key)
{
NTSTATUS Status;
ULONG KeySize;
Status = ZwQueryValueKey(hKey, Key, KeyValueFullInformation, 0, 0, &KeySize);
if (Status == STATUS_BUFFER_TOO_SMALL || Status == STATUS_BUFFER_OVERFLOW)
return KeySize;
return 0;
}
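// ReadRegistry uses the classic two-call pattern: query the required buffer size
// first (via GetKeyInfoSize), then allocate and query again for the actual data.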
template <typename type>
__forceinline type ReadRegistry(UNICODE_STRING RegPath, UNICODE_STRING Key)
{
HANDLE hKey;
OBJECT_ATTRIBUTES ObjAttr;
NTSTATUS Status = STATUS_UNSUCCESSFUL;
InitializeObjectAttributes(&ObjAttr, &RegPath, OBJ_CASE_INSENSITIVE | OBJ_KERNEL_HANDLE, NULL, NULL);
Status = ZwOpenKey(&hKey, KEY_ALL_ACCESS, &ObjAttr);
if (NT_SUCCESS(Status))
{
ULONG KeyInfoSize = GetKeyInfoSize(hKey, &Key);
ULONG KeyInfoSizeNeeded;
if (KeyInfoSize == 0)
{
ZwClose(hKey);
return 0;
}
PKEY_VALUE_FULL_INFORMATION pKeyInfo = (PKEY_VALUE_FULL_INFORMATION)malloc(KeyInfoSize);
if (pKeyInfo == NULL)
{
ZwClose(hKey);
return 0;
}
RtlZeroMemory(pKeyInfo, KeyInfoSize);
Status = ZwQueryValueKey(hKey, &Key, KeyValueFullInformation, pKeyInfo, KeyInfoSize, &KeyInfoSizeNeeded);
if (!NT_SUCCESS(Status) || (KeyInfoSize != KeyInfoSizeNeeded))
{
ZwClose(hKey);
free(pKeyInfo);
return 0;
}
type Result = *(type*)((LONG64)pKeyInfo + pKeyInfo->DataOffset); // read the value before freeing the buffer
ZwClose(hKey);
free(pKeyInfo);
return Result;
}
return 0;
}
__forceinline bool WriteRegistry(UNICODE_STRING RegPath, UNICODE_STRING Key, PVOID Address, ULONG Type, ULONG Size)
{
bool Success = false;
HANDLE hKey;
OBJECT_ATTRIBUTES ObjAttr;
NTSTATUS Status = STATUS_UNSUCCESSFUL;
InitializeObjectAttributes(&ObjAttr, &RegPath, OBJ_CASE_INSENSITIVE | OBJ_KERNEL_HANDLE, NULL, NULL);
Status = ZwOpenKey(&hKey, KEY_ALL_ACCESS, &ObjAttr);
if (NT_SUCCESS(Status))
{
Status = ZwSetValueKey(hKey, &Key, NULL, Type, Address, Size);
if (NT_SUCCESS(Status))
Success = true;
ZwClose(hKey);
}
else {
Status = ZwCreateKey(&hKey, KEY_ALL_ACCESS, &ObjAttr, 0, &RegPath, 0, 0);
if (NT_SUCCESS(Status))
{
Status = ZwSetValueKey(hKey, &Key, NULL, Type, Address, Size);
if (NT_SUCCESS(Status))
Success = true;
ZwClose(hKey); // only close the handle when the key was actually created
}
}
return Success;
}
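// Minimal usage sketch (assumes a context where these Zw* APIs are callable; the
// registry path below is purely illustrative):
//
//   UNICODE_STRING path = RTL_CONSTANT_STRING(L"\\Registry\\Machine\\SOFTWARE\\Example");
//   UNICODE_STRING key = RTL_CONSTANT_STRING(L"Value");
//   ULONG data = 42;
//   RegistryUtils::WriteRegistry(path, key, &data, REG_DWORD, sizeof(data));
//   ULONG readBack = RegistryUtils::ReadRegistry<ULONG>(path, key);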
} | Achtzehn18/Fortnite-External |
<|start_filename|>spug_web/src/pages/deploy/do/index.module.css<|end_filename|>
.header {
display: flex;
}
.collapse {
margin-top: 16px;
}
.collapse :global(.ant-collapse-content-box) {
padding: 0;
}
.ext1Console {
min-height: 40px;
max-height: 300px;
padding: 10px 15px;
}
.ext2Block {
display: flex;
background-color: #fff;
margin-top: 16px;
border-radius: 4px;
border: 1px solid #d9d9d9;
}
.ext2Console {
flex: 1;
padding: 30px;
}
.ext2Step {
padding: 24px;
width: 220px;
border-right: 1px solid #e8e8e8;
}
.ext2Step :global(.ant-steps-item) {
height: 100px;
}
.ext2Tips {
color: #888;
margin-top: 30px;
}
pre {
margin: 0;
}
<|start_filename|>spug_web/src/components/index.module.css<|end_filename|>
.searchForm :global(.ant-form-item) {
display: flex;
}
.searchForm :global(.ant-form-item-control-wrapper) {
flex: 1;
}
.searchForm :global(.ant-form-item-label) {
padding-right: 8px;
}
.statisticsCard {
position: relative;
text-align: center;
}
.statisticsCard span {
color: rgba(0, 0, 0, .45);
display: inline-block;
line-height: 22px;
margin-bottom: 4px;
}
.statisticsCard p {
font-size: 32px;
line-height: 32px;
margin: 0;
}
.statisticsCard em {
background-color: #e8e8e8;
position: absolute;
height: 56px;
width: 1px;
top: 0;
right: 0;
}
<|start_filename|>spug_web/src/pages/schedule/index.module.css<|end_filename|>
.steps {
width: 520px;
margin: 0 auto 30px;
}
.delIcon {
font-size: 24px;
position: relative;
top: 4px;
}
.delIcon:hover {
color: #f5222d;
}
<|start_filename|>spug_web/src/pages/system/role/index.module.css<|end_filename|>
.container :global(.ant-modal-footer) {
border-top: 0;
}
.table {
width: 100%;
border: 1px solid #dfdfdf;
}
.table :global(.ant-checkbox-group) {
width: 100%;
}
.table th {
background-color: #fafafa;
color: #404040;
font-size: 18px;
font-weight: 500;
padding: 5px 15px;
}
.table td {
padding: 5px 15px;
} | atompi/spug |
<|start_filename|>src/AnyOfCodeGenerator/IntExtensions.cs<|end_filename|>
using System;
namespace AnyOfGenerator
{
internal static class IntExtensions
{
private static readonly string[] Order = new[] { "Zeroth", "First", "Second", "Third", "Fourth", "Fifth", "Sixth", "Seventh", "Eighth", "Ninth", "Tenth" };
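/// <summary>
/// Maps an index to its ordinal name, e.g. 1 => "First"; indexes beyond "Tenth" throw.
/// </summary>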
public static string Ordinalize(this int value)
{
if (value < Order.Length)
{
return Order[value];
}
throw new NotSupportedException();
}
}
}
<|start_filename|>examples/ConsoleAppConsumerNET472/Program.cs<|end_filename|>
using System;
using AnyOfTypes;
namespace ConsoleAppConsumerNET472
{
class Program
{
static void Main(string[] args)
{
Console.WriteLine(ReturnSomething().CurrentValue);
Console.WriteLine(ReturnSomething().CurrentValue);
Console.WriteLine(ReturnSomething().CurrentValue);
//var c = new ClassLibrary.Class1();
//Console.WriteLine("call X with \"x\" = " + c.X("x"));
//Console.WriteLine("call X with 123 = " + c.X(123));
}
private static AnyOf<string, int, bool> ReturnSomething()
{
switch (new Random().Next(3))
{
case 1:
return "test";
case 2:
return 42;
default:
return true;
}
}
}
}
<|start_filename|>src/AnyOfCodeGeneratorTester/Program.cs<|end_filename|>
using AnyOf.SourceGenerator;
using AnyOfGenerator;
namespace AnyOfCodeGeneratorTester
{
class Program
{
static void Main(string[] args)
{
var generator = new AnyOfCodeGenerator();
generator.Generate(new OutputOptions
{
Type = OutputType.File,
SupportsNullable = false,
Folder = "../../../../AnyOf"
});
}
}
}
<|start_filename|>examples/ClassLibrary2/Class1.cs<|end_filename|>
using AnyOfTypes;
namespace ClassLibrary2
{
public class Class2
{
public AnyOf<string?, int> X2(AnyOf<int?, bool> value)
{
if (value.IsFirst)
{
return $"str = {value.First}";
}
return value.Second ? 1 : 0;
}
}
}
<|start_filename|>src/AnyOf.System.Text.Json/AnyOfJsonConverter.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using AnyOfTypes.System.Text.Json.Extensions;
using AnyOfTypes.System.Text.Json.Matcher;
using AnyOfTypes.System.Text.Json.Matcher.Models;
using Nelibur.ObjectMapper;
namespace AnyOfTypes.System.Text.Json
{
public class AnyOfJsonConverter : JsonConverter<object?>
{
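/// <summary>
/// Reads the JSON payload into a JsonElement and picks the best-fitting generic
/// argument of the AnyOf type: arrays and objects are matched structurally,
/// primitives by their token kind.
/// </summary>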
public override object? Read(ref Utf8JsonReader reader, Type? typeToConvert, JsonSerializerOptions options)
{
object? value;
var jsonElement = GetConverter<JsonElement>(options).Read(ref reader, typeof(object), options);
switch (jsonElement.ValueKind)
{
case JsonValueKind.Array:
value = FindBestArrayMatch(jsonElement, typeToConvert, options);
break;
case JsonValueKind.Object:
value = FindBestObjectMatch(jsonElement, typeToConvert?.GetGenericArguments() ?? new Type[0], options);
break;
default:
value = GetSimpleValue(jsonElement);
break;
}
if (typeToConvert is null)
{
return value;
}
if (value is null)
{
return Activator.CreateInstance(typeToConvert);
}
return Activator.CreateInstance(typeToConvert, value);
}
private static object? GetSimpleValue(JsonElement reader)
{
switch (reader.ValueKind)
{
case JsonValueKind.String:
if (reader.TryGetDateTime(out var date))
{
return date;
}
return reader.GetString();
case JsonValueKind.Number:
if (reader.TryGetInt32(out var i))
{
return i;
}
if (reader.TryGetInt64(out var l))
{
return l;
}
if (reader.TryGetUInt32(out var ui))
{
return ui;
}
if (reader.TryGetUInt64(out var ul))
{
return ul;
}
return reader.GetDecimal();
case JsonValueKind.True:
return true;
case JsonValueKind.False:
return false;
case JsonValueKind.Null:
return null;
default:
throw new JsonException($"The ValueKind '{reader.ValueKind}' is not supported.");
}
}
private object? FindBestArrayMatch(JsonElement jsonElement, Type? typeToConvert, JsonSerializerOptions options)
{
var enumerableTypes = typeToConvert?.GetGenericArguments().Where(t => typeof(IEnumerable).IsAssignableFrom(t)).ToArray() ?? new Type[0];
var types = enumerableTypes.Select(t => t.GetElementTypeX()).ToArray();
var list = new List<object?>();
Type? elementType = null;
foreach (var arrayElement in jsonElement.EnumerateArray())
{
object? value;
if (arrayElement.ValueKind == JsonValueKind.Object)
{
value = FindBestObjectMatch(arrayElement, types, options);
}
else
{
value = GetSimpleValue(arrayElement);
}
if (elementType is null)
{
elementType = value?.GetType();
}
list.Add(value);
}
if (elementType is null)
{
return null;
}
var typedListDetails = list.CastToTypedList(elementType);
foreach (var knownIEnumerableType in enumerableTypes)
{
if (knownIEnumerableType.GetElementTypeX() == elementType)
{
TinyMapper.Bind(typedListDetails.ListType, knownIEnumerableType);
return TinyMapper.Map(typedListDetails.ListType, knownIEnumerableType, typedListDetails.List);
}
}
return null;
}
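// Picks the candidate type whose public read/write properties best overlap the
// JSON object's members (delegated to MatchFinder.FindBestType).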
private static object? FindBestObjectMatch(JsonElement objectElement, Type[] types, JsonSerializerOptions options)
{
var properties = new List<PropertyDetails>();
foreach (var element in objectElement.EnumerateObject())
{
var propertyDetails = new PropertyDetails
{
CanRead = true,
CanWrite = true,
IsPublic = true,
Name = element.Name
};
object? val;
switch (element.Value.ValueKind)
{
case JsonValueKind.Object:
val = FindBestObjectMatch(element.Value, types, options);
break;
default:
val = GetSimpleValue(element.Value);
break;
}
propertyDetails.PropertyType = val?.GetType();
propertyDetails.IsValueType = val?.GetType().IsValueType == true;
properties.Add(propertyDetails);
}
var mostSuitableType = MatchFinder.FindBestType(properties, types);
if (mostSuitableType is not null)
{
return ToObject(objectElement, mostSuitableType, options);
}
throw new JsonException("No suitable type found.");
}
public override void Write(Utf8JsonWriter writer, object? value, JsonSerializerOptions options)
{
if (value is null)
{
writer.WriteNullValue();
return;
}
var currentValue = value.GetNullablePropertyValue("CurrentValue");
if (currentValue is null)
{
writer.WriteNullValue();
return;
}
var currentType = value.GetPropertyValue<Type>("CurrentValueType");
JsonSerializer.Serialize(writer, currentValue, currentType, options);
}
public override bool CanConvert(Type objectType)
{
return objectType.FullName.StartsWith("AnyOfTypes.AnyOf`");
}
/// <summary>
/// - https://stackoverflow.com/questions/58138793/system-text-json-jsonelement-toobject-workaround
/// - https://stackoverflow.com/a/58193164/255966
/// </summary>
private static object? ToObject(JsonElement element, Type returnType, JsonSerializerOptions? options = null)
{
var json = element.GetRawText();
return JsonSerializer.Deserialize(json, returnType, options);
}
/// <summary>
/// https://github.com/dahomey-technologies/Dahomey.Json/blob/master/src/Dahomey.Json/Util/Utf8JsonReaderExtensions.cs
/// </summary>
private static JsonConverter<T> GetConverter<T>(JsonSerializerOptions options)
{
return (JsonConverter<T>)options.GetConverter(typeof(T));
}
}
}
<|start_filename|>src/AnyOf.Newtonsoft.Json/AnyOfJsonConverter.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using AnyOfTypes.System.Text.Json.Extensions;
using AnyOfTypes.System.Text.Json.Matcher;
using AnyOfTypes.System.Text.Json.Matcher.Models;
using Nelibur.ObjectMapper;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace AnyOfTypes.Newtonsoft.Json
{
public class AnyOfJsonConverter : JsonConverter
{
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
if (value is null)
{
if (serializer.NullValueHandling == NullValueHandling.Include)
{
serializer.Serialize(writer, value);
}
return;
}
var currentValue = value.GetNullablePropertyValue("CurrentValue");
if (currentValue is null)
{
if (serializer.NullValueHandling == NullValueHandling.Include)
{
serializer.Serialize(writer, currentValue);
}
return;
}
serializer.Serialize(writer, currentValue);
}
/// <summary>
/// See
/// - https://stackoverflow.com/questions/8030538/how-to-implement-custom-jsonconverter-in-json-net
/// - https://stackoverflow.com/a/59286262/255966
/// </summary>
public override object? ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
object? value;
switch (reader.TokenType)
{
case JsonToken.Null:
value = null;
break;
case JsonToken.StartObject:
value = FindBestObjectMatch(reader, objectType?.GetGenericArguments() ?? new Type[0], serializer);
break;
case JsonToken.StartArray:
value = FindBestArrayMatch(reader, objectType, existingValue, serializer);
break;
default:
value = GetSimpleValue(reader, existingValue);
break;
}
if (value is null)
{
return Activator.CreateInstance(objectType);
}
return Activator.CreateInstance(objectType, value);
}
private static object? GetSimpleValue(JsonReader reader, object existingValue)
{
var jValue = new JValue(reader.Value);
object? value;
switch (reader.TokenType)
{
case JsonToken.String:
value = (string)jValue;
break;
case JsonToken.Date:
value = (DateTime)jValue;
break;
case JsonToken.Boolean:
value = (bool)jValue;
break;
case JsonToken.Integer:
value = (int)jValue;
break;
case JsonToken.Float:
value = (double)jValue;
break;
default:
value = jValue.Value;
break;
}
if (value is null)
{
return existingValue;
}
return value;
}
private object? FindBestArrayMatch(JsonReader reader, Type? typeToConvert, object existingValue, JsonSerializer serializer)
{
var enumerableTypes = typeToConvert?.GetGenericArguments().Where(t => typeof(IEnumerable).IsAssignableFrom(t)).ToArray() ?? new Type[0];
var elementTypes = enumerableTypes.Select(t => t.GetElementTypeX()).ToArray();
var list = new List<object?>();
Type? elementType = null;
while (reader.Read() && reader.TokenType != JsonToken.EndArray)
{
object? value;
if (reader.TokenType == JsonToken.StartObject)
{
value = FindBestObjectMatch(reader, elementTypes, serializer);
}
else
{
value = GetSimpleValue(reader, existingValue);
}
if (elementType is null)
{
elementType = value?.GetType();
}
list.Add(value);
}
if (elementType is null)
{
return null;
}
var typedListDetails = list.CastToTypedList(elementType);
foreach (var knownIEnumerableType in enumerableTypes)
{
if (knownIEnumerableType.GetElementTypeX() == elementType)
{
TinyMapper.Bind(typedListDetails.ListType, knownIEnumerableType);
return TinyMapper.Map(typedListDetails.ListType, knownIEnumerableType, typedListDetails.List);
}
}
return null;
}
private static object? FindBestObjectMatch(JsonReader reader, Type[] types, JsonSerializer serializer)
{
var properties = new List<PropertyDetails>();
var jObject = JObject.Load(reader);
foreach (var element in jObject)
{
var propertyDetails = new PropertyDetails
{
CanRead = true,
CanWrite = true,
IsPublic = true,
Name = element.Key
};
var val = element.Value.ToObject<object?>();
propertyDetails.PropertyType = val?.GetType();
propertyDetails.IsValueType = val?.GetType().GetTypeInfo().IsValueType == true;
properties.Add(propertyDetails);
}
var bestType = MatchFinder.FindBestType(properties, types);
if (bestType is not null)
{
var target = Activator.CreateInstance(bestType);
using (JsonReader jObjectReader = CopyReaderForObject(reader, jObject))
{
serializer.Populate(jObjectReader, target);
}
return target;
}
return null;
}
public override bool CanConvert(Type objectType)
{
return objectType.FullName.StartsWith("AnyOfTypes.AnyOf`");
}
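/// <summary>
/// Creates a reader over the buffered JObject while preserving the settings of
/// the original reader, so Populate parses with consistent behavior.
/// </summary>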
private static JsonReader CopyReaderForObject(JsonReader reader, JObject jObject)
{
var jObjectReader = jObject.CreateReader();
jObjectReader.CloseInput = reader.CloseInput;
jObjectReader.Culture = reader.Culture;
jObjectReader.DateFormatString = reader.DateFormatString;
jObjectReader.DateParseHandling = reader.DateParseHandling;
jObjectReader.DateTimeZoneHandling = reader.DateTimeZoneHandling;
jObjectReader.FloatParseHandling = reader.FloatParseHandling;
jObjectReader.MaxDepth = reader.MaxDepth;
jObjectReader.SupportMultipleContent = reader.SupportMultipleContent;
return jObjectReader;
}
}
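// Minimal usage sketch (the model type is borrowed from the System.Text.Json tests
// and is an assumption here, not part of this file):
//
//   var settings = new JsonSerializerSettings();
//   settings.Converters.Add(new AnyOfJsonConverter());
//   var result = JsonConvert.DeserializeObject<TestSimpleTypes>("{\"IntOrString\":1}", settings);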
}
<|start_filename|>examples/ClassLibrary/Class1.cs<|end_filename|>
using AnyOfTypes;
namespace ClassLibrary1
{
public class Class1
{
public AnyOf<int?, bool> X1(AnyOf<string?, int> value)
{
if (value.IsFirst)
{
return value.First == "x";
}
return value.Second;
}
}
}
<|start_filename|>examples/ConsoleAppConsumer/Program.cs<|end_filename|>
using System;
using AnyOfTypes;
namespace ConsoleAppConsumer
{
internal class Program
{
private static void Main(string[] args)
{
var hash = new HashCode();
Console.WriteLine(ReturnSomething().CurrentValue);
Console.WriteLine(new string('-', 50));
var xInt1 = X(42);
var hc = xInt1.GetHashCode();
var xInt2 = X(42);
var xInt3 = X(5);
if (xInt1 == xInt2)
{
Console.WriteLine("---> xInt1 == xInt2");
}
if (xInt1 != xInt3)
{
Console.WriteLine("---> xInt1 != xInt3");
}
if (xInt1.Equals(xInt2))
{
Console.WriteLine("---> xInt1 Equals xInt2");
}
if (!xInt1.Equals(xInt3))
{
Console.WriteLine("---> xInt1 !Equals xInt3");
}
X("test");
Console.WriteLine(new string('-', 50));
X3(42);
X3("test");
X3(DateTime.Now);
Console.WriteLine(new string('-', 50));
}
private static AnyOf<string, int, bool> ReturnSomething()
{
return new Random().Next(3) switch
{
1 => "test",
2 => 42,
_ => true,
};
}
private static AnyOf<int, string> X(AnyOf<int, string> value)
{
Console.WriteLine("ToString " + value.ToString());
Console.WriteLine("CurrentValue " + value.CurrentValue);
Console.WriteLine("IsUndefined " + value.IsUndefined);
Console.WriteLine("IsFirst " + value.IsFirst);
Console.WriteLine("IsSecond " + value.IsSecond);
switch (value.CurrentType)
{
case AnyOfType.First:
Console.WriteLine("AnyOfType = First with value " + value.First);
return value;
case AnyOfType.Second:
Console.WriteLine("AnyOfType = Second with value " + value.Second);
return value;
default:
throw new Exception("???");
}
}
private static void X3(AnyOf<int, string, DateTime> value)
{
Console.WriteLine("ToString " + value.ToString());
Console.WriteLine("CurrentValue " + value.CurrentValue);
Console.WriteLine("IsUndefined " + value.IsUndefined);
Console.WriteLine("IsFirst " + value.IsFirst);
Console.WriteLine("IsSecond " + value.IsSecond);
Console.WriteLine("IsThird " + value.IsThird);
switch (value.CurrentType)
{
case AnyOfType.First:
Console.WriteLine("AnyOfType = First with value " + value.First);
break;
case AnyOfType.Second:
Console.WriteLine("AnyOfType = Second with value " + value.Second);
break;
case AnyOfType.Third:
Console.WriteLine("AnyOfType = Third with value " + value.Third);
break;
default:
Console.WriteLine("????");
break;
}
}
}
}
<|start_filename|>tests/AnyOf.System.Text.Json.Tests/AnyOfJsonConverterTests.cs<|end_filename|>
using System.Collections.Generic;
using System.Text.Json;
using AnyOf.System.Text.Json.Tests.TestModels;
using FluentAssertions;
using Xunit;
namespace AnyOfTypes.System.Text.Json.Tests
{
public class AnyOfJsonConverterTests
{
[Fact]
public void Serialize_AnyOf_With_SimpleTypes()
{
// Arrange
var test = new TestSimpleTypes
{
IntOrString = 1
};
// Act
var options = new JsonSerializerOptions
{
WriteIndented = false
};
options.Converters.Add(new AnyOfJsonConverter());
var json = JsonSerializer.Serialize(test, options);
// Assert
json.Should().Be("{\"IntOrString\":1}");
}
[Fact]
public void Serialize_AnyOf_With_ComplexTypes()
{
// Arrange
var test = new TestComplexTypes
{
AorB = new A
{
Id = 1
}
};
// Act
var options = new JsonSerializerOptions
{
WriteIndented = false
};
options.Converters.Add(new AnyOfJsonConverter());
var json = JsonSerializer.Serialize(test, options);
// Assert
json.Should().Be("{\"AorB\":{\"Id\":1}}");
}
[Fact]
public void Serialize_AnyOf_With_MixedTypes()
{
// Arrange
var test = new TestMixedTypes
{
IntOrStringOrAOrB = 1
};
// Act
var options = new JsonSerializerOptions
{
WriteIndented = false
};
options.Converters.Add(new AnyOfJsonConverter());
var json = JsonSerializer.Serialize(test, options);
// Assert
json.Should().Be("{\"IntOrStringOrAOrB\":1}");
}
[Fact]
public void Serialize_AnyOf_With_IntArray()
{
// Arrange
var test = new TestComplexArray
{
X = new int[] { 42 }
};
// Act
var options = new JsonSerializerOptions
{
WriteIndented = false
};
options.Converters.Add(new AnyOfJsonConverter());
var json = JsonSerializer.Serialize(test, options);
// Assert
json.Should().Be("{\"X\":[42]}");
}
[Fact]
public void Serialize_AnyOf_With_ObjectList()
{
// Arrange
var test = new TestComplexArray
{
X = new List<A> { new A { Id = 1 }, new A { Id = 2 } }
};
// Act
var options = new JsonSerializerOptions
{
WriteIndented = false
};
options.Converters.Add(new AnyOfJsonConverter());
var json = JsonSerializer.Serialize(test, options);
// Assert
json.Should().Be("{\"X\":[{\"Id\":1},{\"Id\":2}]}");
}
[Fact]
public void Deserialize_AnyOf_With_SimpleTypes()
{
// Arrange
var expected = new TestSimpleTypes
{
IntOrString = 1
};
// Act
var options = new JsonSerializerOptions();
options.Converters.Add(new AnyOfJsonConverter());
var result = JsonSerializer.Deserialize<TestSimpleTypes>("{\"IntOrString\":1}", options);
// Assert
result.Should().BeEquivalentTo(expected);
}
[Fact]
public void Deserialize_AnyOf_With_ComplexTypes()
{
// Arrange
var expected = new A
{
Id = 1
};
// Act
var options = new JsonSerializerOptions();
options.Converters.Add(new AnyOfJsonConverter());
var result = JsonSerializer.Deserialize<TestComplexTypes>("{\"AorB\":{\"Id\":1}}", options);
// Assert
result.AorB.First.Should().BeEquivalentTo(expected);
}
[Fact]
public void Deserialize_AnyOf_With_MixedTypes()
{
// Arrange
var expected = new TestMixedTypes
{
IntOrStringOrAOrB = 1
};
// Act
var options = new JsonSerializerOptions();
options.Converters.Add(new AnyOfJsonConverter());
var result = JsonSerializer.Deserialize<TestMixedTypes>("{\"IntOrStringOrAOrB\":1}", options);
// Assert
result.Should().BeEquivalentTo(expected);
}
[Fact]
public void Deserialize_AnyOf_With_IntArray()
{
// Arrange
var expected = new int[] { 42 };
// Act
var options = new JsonSerializerOptions();
options.Converters.Add(new AnyOfJsonConverter());
var result = JsonSerializer.Deserialize<TestComplexArray>("{\"X\":[42]}", options);
// Assert
result.X.First.Should().BeEquivalentTo(expected);
}
[Fact]
public void Deserialize_AnyOf_With_StringList()
{
// Arrange
var expected = new[] { "a", "b" };
// Act
var options = new JsonSerializerOptions();
options.Converters.Add(new AnyOfJsonConverter());
var result = JsonSerializer.Deserialize<TestComplexArray>("{\"X\":[\"a\", \"b\"]}", options);
// Assert
result.X.Second.Should().BeEquivalentTo(expected);
}
[Fact]
public void Deserialize_AnyOf_With_ObjectList_A()
{
// Arrange
var expected = new List<A>
{
new A
{
Id = 1
},
new A
{
Id = 2
}
};
// Act
var options = new JsonSerializerOptions();
options.Converters.Add(new AnyOfJsonConverter());
var result = JsonSerializer.Deserialize<TestComplexArray>("{\"X\":[{\"Id\":1},{\"Id\":2}]}", options);
// Assert
result.X.Third.Should().BeEquivalentTo(expected);
}
[Fact]
public void Deserialize_AnyOf_With_ObjectList_B()
{
// Arrange
var expected = new List<B>
{
new B
{
Guid = "a"
},
new B
{
Guid = "b"
}
};
// Act
var options = new JsonSerializerOptions();
options.Converters.Add(new AnyOfJsonConverter());
var result = JsonSerializer.Deserialize<TestComplexArray>("{\"X\":[{\"Guid\":\"a\"},{\"Guid\":\"b\"}]}", options);
// Assert
result.X.Fourth.Should().BeEquivalentTo(expected);
}
}
}
<|start_filename|>examples/ConsoleAppConsumer/Class1.cs<|end_filename|>
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace AnyOfTypes2
{
public enum AnyOfType
{
Undefined = 0, First, Second, Third, Fourth, Fifth, Sixth, Seventh, Eighth, Ninth, Tenth
}
}
#nullable enable
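// Hand-edited, nullable-annotated copy of the generated AnyOf<TFirst, TSecond>
// (note the AnyOfTypes2 namespace); the trailing comments appear to mark the
// spots that differ from the generated code.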
namespace AnyOfTypes2
{
[DebuggerDisplay("AnyOfType = {_currentType}; Type = {_currentValueType?.Name}; Value = '{ToString()}'")]
public struct AnyOf<TFirst, TSecond>
{
private readonly int _numberOfTypes;
private readonly object? _currentValue; // object -> object?
private readonly Type _currentValueType;
private readonly AnyOfType _currentType;
private readonly TFirst _first;
private readonly TSecond _second;
public bool IsUndefined => _currentType == AnyOfType.Undefined;
public bool IsFirst => _currentType == AnyOfType.First;
public bool IsSecond => _currentType == AnyOfType.Second;
public static implicit operator AnyOf<TFirst, TSecond>(TFirst value) => new AnyOf<TFirst, TSecond>(value);
public static implicit operator TFirst(AnyOf<TFirst, TSecond> @this) => @this.First; // !
public AnyOf(TFirst value)
{
_numberOfTypes = 2;
_currentType = AnyOfType.First;
_currentValue = value;
_currentValueType = typeof(TFirst);
_first = value;
_second = default!; // !
}
public TFirst First // ?
{
get
{
Validate(AnyOfType.First);
return _first;
}
}
public static implicit operator AnyOf<TFirst, TSecond>(TSecond value) => new AnyOf<TFirst, TSecond>(value);
public static implicit operator TSecond(AnyOf<TFirst, TSecond> @this) => @this.Second;
public AnyOf(TSecond value)
{
_numberOfTypes = 2;
_currentType = AnyOfType.Second;
_currentValue = value;
_currentValueType = typeof(TSecond);
_second = value;
_first = default!; // !
}
public TSecond Second //
{
get
{
Validate(AnyOfType.Second);
return _second;
}
}
private void Validate(AnyOfType desiredType)
{
if (desiredType != _currentType)
{
throw new InvalidOperationException($"Attempting to get {desiredType} when {_currentType} is set");
}
}
public AnyOfType CurrentType
{
get
{
return _currentType;
}
}
public object? CurrentValue // object -> object?
{
get
{
return _currentValue;
}
}
public Type CurrentValueType
{
get
{
return _currentValueType;
}
}
public override int GetHashCode()
{
var hash = new HashCode();
hash.Add(_currentValue);
hash.Add(_currentType);
hash.Add(_first);
hash.Add(_second);
hash.Add(typeof(TFirst));
hash.Add(typeof(TSecond));
return hash.ToHashCode();
}
private bool Equals(AnyOf<TFirst, TSecond> other)
{
return _currentType == other._currentType &&
_numberOfTypes == other._numberOfTypes &&
EqualityComparer<object>.Default.Equals(_currentValue, other._currentValue) &&
EqualityComparer<TFirst>.Default.Equals(_first, other._first) &&
EqualityComparer<TSecond>.Default.Equals(_second, other._second);
}
public static bool operator ==(AnyOf<TFirst, TSecond> obj1, AnyOf<TFirst, TSecond> obj2)
{
return obj1.Equals(obj2);
}
public static bool operator !=(AnyOf<TFirst, TSecond> obj1, AnyOf<TFirst, TSecond> obj2)
{
return !obj1.Equals(obj2);
}
public override bool Equals(object? obj) // object -> object?
{
return obj is AnyOf<TFirst, TSecond> o && Equals(o);
}
public override string? ToString() // string -> string?
{
return IsUndefined ? null : $"{_currentValue}";
}
}
}
#nullable disable
<|start_filename|>tests/AnyOfTests/AnyOfTest.cs<|end_filename|>
using System;
using AnyOfTypes;
using FluentAssertions;
using Xunit;
namespace AnyOfTests
{
public class AnyOfTest
{
[Fact]
public void AnyOf_GetHashCode()
{
// Arrange
var anyOfIntAndStringTypeWithIntValue1 = new AnyOf<int, string>(42);
var anyOfIntAndStringTypeWithIntValue2 = new AnyOf<int, string>(42);
var anyOfIntAndStringTypeWithIntValue3 = new AnyOf<int, string>(5);
var anyOfIntAndBoolTypeWithBoolValue = new AnyOf<int, bool>(42);
// Assert
anyOfIntAndStringTypeWithIntValue1.GetHashCode().Should().Be(anyOfIntAndStringTypeWithIntValue2.GetHashCode());
anyOfIntAndStringTypeWithIntValue1.GetHashCode().Should().NotBe(anyOfIntAndStringTypeWithIntValue3.GetHashCode());
anyOfIntAndStringTypeWithIntValue1.GetHashCode().Should().NotBe(anyOfIntAndBoolTypeWithBoolValue.GetHashCode());
}
[Fact]
public void AnyOf_Equals_Method()
{
// Arrange
var anyOfIntAndStringTypeWithIntValue1 = new AnyOf<int, string>(42);
var anyOfIntAndStringTypeWithIntValue2 = new AnyOf<int, string>(42);
var anyOfIntAndStringTypeWithIntValue3 = new AnyOf<int, string>(5);
var anyOfIntAndBoolTypeWithBoolValue = new AnyOf<int, bool>(42);
var normalInt = 42;
// Assert
anyOfIntAndStringTypeWithIntValue1.Equals(anyOfIntAndStringTypeWithIntValue2).Should().BeTrue();
anyOfIntAndStringTypeWithIntValue1.Equals(anyOfIntAndStringTypeWithIntValue3).Should().BeFalse();
anyOfIntAndStringTypeWithIntValue1.Equals(anyOfIntAndBoolTypeWithBoolValue).Should().BeFalse();
anyOfIntAndStringTypeWithIntValue1.Equals(normalInt).Should().BeFalse();
}
[Fact]
public void AnyOf_Equals_Operator()
{
// Arrange
var anyOfIntAndStringTypeWithIntValue1 = new AnyOf<int, string>(42);
var anyOfIntAndStringTypeWithIntValue2 = new AnyOf<int, string>(42);
var anyOfIntAndStringTypeWithIntValue3 = new AnyOf<int, string>(5);
var anyOfIntAndStringTypeWithStringValue = new AnyOf<int, string>("x");
var anyOfIntAndBoolTypeWithIntValue = new AnyOf<int, bool>(42);
var anyOfIntAndBoolTypeWithBoolValue = new AnyOf<int, bool>(true);
var normalBool = true;
var normalInt = 42;
var normalString = "x";
// Assert
(anyOfIntAndStringTypeWithIntValue1 == anyOfIntAndStringTypeWithIntValue2).Should().BeTrue();
(anyOfIntAndStringTypeWithIntValue1 == anyOfIntAndBoolTypeWithIntValue).Should().BeTrue();
(anyOfIntAndStringTypeWithIntValue1 == normalInt).Should().BeTrue();
(anyOfIntAndStringTypeWithIntValue1 == normalString).Should().BeFalse();
(anyOfIntAndBoolTypeWithBoolValue == normalBool).Should().BeTrue();
(anyOfIntAndStringTypeWithStringValue == normalString).Should().BeTrue();
(anyOfIntAndStringTypeWithIntValue1 == anyOfIntAndStringTypeWithIntValue3).Should().BeFalse();
(anyOfIntAndStringTypeWithIntValue1 == anyOfIntAndBoolTypeWithIntValue).Should().BeTrue();
(anyOfIntAndStringTypeWithIntValue1 == anyOfIntAndStringTypeWithStringValue).Should().BeFalse();
}
[Fact]
public void AnyOf_Equals_Operator_Throws_Exception()
{
// Arrange
var anyOfIntAndStringTypeWithIntValue1 = new AnyOf<int, string>(42);
var anyOfIntAndBoolTypeWithBoolValue = new AnyOf<int, bool>(true);
// Assert
Func<bool> a = () => anyOfIntAndStringTypeWithIntValue1 == anyOfIntAndBoolTypeWithBoolValue;
a.Should().Throw<Exception>();
}
}
}
<|start_filename|>src/AnyOf/AnyOfTypes.g.cs<|end_filename|>
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by https://github.com/StefH/AnyOf.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace AnyOfTypes
{
public enum AnyOfType
{
Undefined = 0, First, Second, Third, Fourth, Fifth, Sixth, Seventh, Eighth, Ninth, Tenth
}
}
<|start_filename|>examples/ConsoleAppUsesClassLibraries/Program.cs<|end_filename|>
using ClassLibrary1;
using ClassLibrary2;
namespace ConsoleAppConsumer
{
internal class Program
{
private static void Main(string[] args)
{
var class1 = new Class1();
var result1 = class1.X1("");
var class2 = new Class2();
var result2 = class2.X2(result1); // 👈 CS1503 error
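// The AnyOf types are source-generated into each consuming assembly, so the
// AnyOf<,> returned by ClassLibrary1 and the one expected by ClassLibrary2 are
// distinct CLR types, hence the compile error above.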
}
}
}
<|start_filename|>src/AnyOf.System.Text.Json/Models/ListDetails.cs<|end_filename|>
using System;
using System.Collections;
namespace AnyOfTypes.System.Text.Json.Matcher.Models
{
internal struct ListDetails
{
public IList List { get; set; }
public Type ListType { get; set; }
}
}
<|start_filename|>src/AnyOf.System.Text.Json/Matcher/MatchFinder.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using AnyOfTypes.System.Text.Json.Extensions;
using AnyOfTypes.System.Text.Json.Matcher.Models;
namespace AnyOfTypes.System.Text.Json.Matcher
{
internal static class MatchFinder
{
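/// <summary>
/// Returns the target type with the highest number of matching properties, or null
/// when nothing matches and <paramref name="returnNullIfNoMatchFound"/> is true.
/// </summary>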
public static Type? FindBestType(IEnumerable<PropertyDetails> sourceProperties, Type[] targetTypes, bool returnNullIfNoMatchFound = true)
{
Type? mostSuitableType = null;
int countOfMaxMatchingProperties = -1;
foreach (var targetType in targetTypes)
{
var propMap = GetMatchingProperties(sourceProperties, Map(targetType.GetProperties()));
if (propMap.Count > countOfMaxMatchingProperties)
{
mostSuitableType = targetType;
countOfMaxMatchingProperties = propMap.Count;
}
}
return countOfMaxMatchingProperties == 0 && returnNullIfNoMatchFound ? null : mostSuitableType;
}
private static IList<PropertyMap> GetMatchingProperties(IEnumerable<PropertyDetails> sourceProperties, IEnumerable<PropertyDetails> targetProperties)
{
return
(
from s in sourceProperties
from t in targetProperties
where s.Name == t.Name &&
s.CanRead &&
t.CanWrite &&
s.IsPublic &&
t.IsPublic &&
(s.PropertyType == t.PropertyType || s.PropertyType.IsImplicitlyCastableTo(t.PropertyType) || t.PropertyType.IsImplicitlyCastableTo(s.PropertyType)) &&
(
(s.IsValueType && t.IsValueType) || (s.PropertyType == typeof(string) && t.PropertyType == typeof(string))
)
select new PropertyMap
{
SourceProperty = s,
TargetProperty = t
}
).ToList();
}
public static IList<PropertyMap> GetMatchingProperties(Type sourceType, Type targetType)
{
return GetMatchingProperties(Map(sourceType.GetProperties()), Map(targetType.GetProperties()));
}
private static IEnumerable<PropertyDetails> Map(PropertyInfo[] properties)
{
return properties.Select(p => new PropertyDetails
{
CanRead = p.CanRead,
CanWrite = p.CanWrite,
IsPublic = p.PropertyType.GetTypeInfo().IsPublic,
IsValueType = p.PropertyType.GetTypeInfo().IsValueType,
Name = p.Name,
PropertyType = p.PropertyType
});
}
}
}
<|start_filename|>src/AnyOfCodeGenerator/OutputOptions.cs<|end_filename|>
namespace AnyOf.SourceGenerator
{
public class OutputOptions
{
public OutputType Type { get; set; } = OutputType.Context;
public bool SupportsNullable { get; set; } = true;
public string Folder { get; set; }
}
}
<|start_filename|>src/AnyOf.System.Text.Json/Models/PropertyDetails.cs<|end_filename|>
using System;
namespace AnyOfTypes.System.Text.Json.Matcher.Models
{
internal struct PropertyDetails
{
public string Name { get; set; }
public bool CanRead { get; set; }
public bool CanWrite { get; set; }
public bool IsPublic { get; set; }
public bool IsValueType { get; set; }
public Type? PropertyType { get; set; }
}
}
<|start_filename|>src/AnyOf/HashCodeCalculator.g.cs<|end_filename|>
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by https://github.com/StefH/AnyOf.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System.Collections.Generic;
using System.Linq;
namespace AnyOfTypes
{
// Code is based on https://github.com/Informatievlaanderen/hashcode-calculator
internal static class HashCodeCalculator
{
public static int GetHashCode(IEnumerable<object> hashFieldValues)
{
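// FNV-1a style fold: xor each field's hash into the accumulator, then multiply by the prime.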
const int offset = unchecked((int)2166136261);
const int prime = 16777619;
static int HashCodeAggregator(int hashCode, object value) => value == null
? (hashCode ^ 0) * prime
: (hashCode ^ value.GetHashCode()) * prime;
return hashFieldValues.Aggregate(offset, HashCodeAggregator);
}
}
}
<|start_filename|>src/AnyOf.System.Text.Json/Extensions/Utf8JsonReaderExtensions.cs<|end_filename|>
using System;
using System.Buffers;
using System.Linq;
using System.Text.Json;
namespace AnyOfTypes.System.Text.Json.Extensions
{
internal static class Utf8JsonReaderExtensions
{
public static ReadOnlySpan<byte> GetRawString(this Utf8JsonReader reader)
{
return reader.HasValueSequence ? reader.ValueSequence.ToArray() : reader.ValueSpan;
}
}
}
<|start_filename|>src/AnyOf.System.Text.Json/Extensions/ReflectionHelpers.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using AnyOfTypes.System.Text.Json.Matcher.Models;
namespace AnyOfTypes.System.Text.Json.Extensions
{
internal static class ReflectionHelpers
{
[ThreadStatic]
private static Dictionary<KeyValuePair<Type, Type>, bool>? _implicitCastCache;
// A [ThreadStatic] field must not use an inline initializer (it only runs for the
// first thread), so the per-thread cache is created lazily on first access.
private static Dictionary<KeyValuePair<Type, Type>, bool> ImplicitCastCache => _implicitCastCache ??= new Dictionary<KeyValuePair<Type, Type>, bool>();
public static T GetPropertyValue<T>(this object instance, string name)
{
var value = GetNullablePropertyValue(instance, name);
if (value is null)
{
throw new InvalidOperationException($"The public property '{name}' has a null value.");
}
return (T)value;
}
public static object? GetNullablePropertyValue(this object instance, string name)
{
var type = instance.GetType();
var propertyInfo = type.GetProperty(name);
if (propertyInfo is null)
{
throw new InvalidOperationException($"The type '{type}' does not contain public property '{name}'.");
}
return propertyInfo.GetValue(instance);
}
public static Type GetElementTypeX(this Type enumerableType)
{
return enumerableType.IsArray == true ? enumerableType.GetElementType() : enumerableType.GetGenericArguments().First();
}
public static ListDetails CastToTypedList(this IList source, Type elementType)
{
var listType = typeof(List<>).MakeGenericType(elementType);
var list = (IList)Activator.CreateInstance(listType);
foreach (var item in source)
{
list.Add(item);
}
return new ListDetails
{
List = list,
ListType = listType
};
}
/// <summary>
/// https://stackoverflow.com/questions/17676838/how-to-check-if-type-can-be-converted-to-another-type-in-c-sharp
/// https://stackoverflow.com/questions/2224266/how-to-tell-if-type-a-is-implicitly-convertible-to-type-b
/// </summary>
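/// <remarks>
/// Checks, in order: type identity, assignability, user-defined conversion
/// operators, and finally a Convert.ChangeType probe; results are memoized per type pair.
/// </remarks>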
public static bool IsImplicitlyCastableTo(this Type? from, Type? to)
{
if (from is null || to is null)
{
return false;
}
if (from == to)
{
return true;
}
var key = new KeyValuePair<Type, Type>(from, to);
if (ImplicitCastCache.TryGetValue(key, out bool result))
{
return result;
}
#if !NETSTANDARD1_3
if (to.IsAssignableFrom(from))
{
return ImplicitCastCache[key] = true;
}
#endif
if (from.GetMethods(BindingFlags.Public | BindingFlags.Static).Any(m => m.ReturnType == to && (m.Name == "op_Implicit" || m.Name == "op_Explicit")))
{
return ImplicitCastCache[key] = true;
}
bool changeType = false;
try
{
var val = Activator.CreateInstance(from);
Convert.ChangeType(val, to);
changeType = true;
}
catch
{
// Ignore
}
return ImplicitCastCache[key] = changeType;
}
}
}
<|start_filename|>src/AnyOf/AnyOf_2.g.cs<|end_filename|>
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by https://github.com/StefH/AnyOf.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.Collections.Generic;
namespace AnyOfTypes
{
[DebuggerDisplay("{_thisType}, AnyOfType = {_currentType}; Type = {_currentValueType?.Name}; Value = '{ToString()}'")]
public struct AnyOf<TFirst, TSecond>
{
private readonly string _thisType => $"AnyOf<{typeof(TFirst).Name}, {typeof(TSecond).Name}>";
private readonly int _numberOfTypes;
private readonly object _currentValue;
private readonly Type _currentValueType;
private readonly AnyOfType _currentType;
private readonly TFirst _first;
private readonly TSecond _second;
public readonly AnyOfType[] AnyOfTypes => new [] { AnyOfType.First, AnyOfType.Second };
public readonly Type[] Types => new [] { typeof(TFirst), typeof(TSecond) };
public bool IsUndefined => _currentType == AnyOfType.Undefined;
public bool IsFirst => _currentType == AnyOfType.First;
public bool IsSecond => _currentType == AnyOfType.Second;
public static implicit operator AnyOf<TFirst, TSecond>(TFirst value) => new AnyOf<TFirst, TSecond>(value);
public static implicit operator TFirst(AnyOf<TFirst, TSecond> @this) => @this.First;
public AnyOf(TFirst value)
{
_numberOfTypes = 2;
_currentType = AnyOfType.First;
_currentValue = value;
_currentValueType = typeof(TFirst);
_first = value;
_second = default;
}
public TFirst First
{
get
{
Validate(AnyOfType.First);
return _first;
}
}
public static implicit operator AnyOf<TFirst, TSecond>(TSecond value) => new AnyOf<TFirst, TSecond>(value);
public static implicit operator TSecond(AnyOf<TFirst, TSecond> @this) => @this.Second;
public AnyOf(TSecond value)
{
_numberOfTypes = 2;
_currentType = AnyOfType.Second;
_currentValue = value;
_currentValueType = typeof(TSecond);
_second = value;
_first = default;
}
public TSecond Second
{
get
{
Validate(AnyOfType.Second);
return _second;
}
}
private void Validate(AnyOfType desiredType)
{
if (desiredType != _currentType)
{
throw new InvalidOperationException($"Attempting to get {desiredType} when {_currentType} is set");
}
}
public AnyOfType CurrentType
{
get
{
return _currentType;
}
}
public object CurrentValue
{
get
{
return _currentValue;
}
}
public Type CurrentValueType
{
get
{
return _currentValueType;
}
}
public override int GetHashCode()
{
var fields = new object[]
{
_numberOfTypes,
_currentValue,
_currentType,
_first,
_second,
typeof(TFirst),
typeof(TSecond),
};
return HashCodeCalculator.GetHashCode(fields);
}
private bool Equals(AnyOf<TFirst, TSecond> other)
{
return _currentType == other._currentType &&
_numberOfTypes == other._numberOfTypes &&
EqualityComparer<object>.Default.Equals(_currentValue, other._currentValue) &&
EqualityComparer<TFirst>.Default.Equals(_first, other._first) &&
EqualityComparer<TSecond>.Default.Equals(_second, other._second);
}
public static bool operator ==(AnyOf<TFirst, TSecond> obj1, AnyOf<TFirst, TSecond> obj2)
{
return obj1.Equals(obj2);
}
public static bool operator !=(AnyOf<TFirst, TSecond> obj1, AnyOf<TFirst, TSecond> obj2)
{
return !obj1.Equals(obj2);
}
public override bool Equals(object obj)
{
return obj is AnyOf<TFirst, TSecond> o && Equals(o);
}
public override string ToString()
{
return IsUndefined ? null : $"{_currentValue}";
}
}
}
<|start_filename|>examples/ConsoleAppConsumerViaNuGet/Program.cs<|end_filename|>
using System;
using AnyOfTypes;
namespace ConsoleAppConsumer
{
internal class Program
{
private static void Main(string[] args)
{
//object c = 1000;
//object cast = (AnyOf<int, string>) c;
X(42);
X("test");
}
private static void X(AnyOf<int, string> value)
{
Console.WriteLine($"{value}");
Console.WriteLine("ToString " + value.ToString());
Console.WriteLine("CurrentValue " + value.CurrentValue);
Console.WriteLine("CurrentValueType " + value.CurrentValueType);
Console.WriteLine("IsUndefined " + value.IsUndefined);
Console.WriteLine("IsFirst " + value.IsFirst);
Console.WriteLine("IsSecond " + value.IsSecond);
switch (value.CurrentType)
{
case AnyOfType.First:
Console.WriteLine("AnyOfType = First with value " + value.First);
break;
case AnyOfType.Second:
Console.WriteLine("AnyOfType = Second with value " + value.Second);
break;
default:
Console.WriteLine($"AnyOfType = {value.CurrentType} with value ...");
break;
}
}
}
}
<|start_filename|>src/AnyOf.System.Text.Json/Models/PropertyMap.cs<|end_filename|>
namespace AnyOfTypes.System.Text.Json.Matcher.Models
{
internal struct PropertyMap
{
public PropertyDetails SourceProperty { get; set; }
public PropertyDetails TargetProperty { get; set; }
}
}
<|start_filename|>src/AnyOfCodeGenerator/OutputType.cs<|end_filename|>
namespace AnyOf.SourceGenerator
{
public enum OutputType
{
Context = 0,
Console,
File
}
}
<|start_filename|>src/AnyOfCodeGenerator/AnyOfCodeGenerator.cs<|end_filename|>
using System;
using System.IO;
using System.Linq;
using System.Text;
using AnyOf.SourceGenerator;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.Text;
namespace AnyOfGenerator
{
[Generator]
public class AnyOfCodeGenerator : ISourceGenerator
{
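// The generator emits an AnyOf<T1, ..., Tn> struct for every arity from 2 up to Max,
// plus the shared AnyOfType enum and the HashCodeCalculator helper.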
private const int Max = 10;
public void Initialize(GeneratorInitializationContext context)
{
// No initialization required
}
public void Execute(GeneratorExecutionContext context)
{
bool supportsNullable = context.ParseOptions switch
{
CSharpParseOptions csharpParseOptions => csharpParseOptions.LanguageVersion >= LanguageVersion.CSharp8,
// VisualBasicParseOptions visualBasicParseOptions => visualBasicParseOptions.LanguageVersion >= Microsoft.CodeAnalysis.VisualBasic.LanguageVersion.VisualBasic16,
_ => throw new NotSupportedException("Only C# is supported."),
};
bool nullableEnabled = context.Compilation.Options.NullableContextOptions != NullableContextOptions.Disable;
Generate(new OutputOptions { Type = OutputType.Context, SupportsNullable = supportsNullable }, context);
}
public void Generate(OutputOptions options)
{
Generate(options, null);
}
private static void Generate(OutputOptions options, GeneratorExecutionContext? context)
{
BuildHashCodeCalculatorClass(options, context);
BuildAnyOfTypesEnumClass(options, context);
// BuildBaseClass(options, context);
for (int numberOfTypes = 2; numberOfTypes <= Max; numberOfTypes++)
{
BuildTxClass(options, context, numberOfTypes);
}
}
private static string[] GetTypeNames(int numberOfTypes)
{
return Enumerable.Range(0, numberOfTypes).Select(idx => $"{(idx + 1).Ordinalize()}").ToArray();
}
private static void BuildHashCodeCalculatorClass(OutputOptions options, GeneratorExecutionContext? context)
{
const string filename = "HashCodeCalculator.g.cs";
var sb = new StringBuilder();
sb.Append(AddHeader());
sb.AppendLine("using System.Collections.Generic;");
sb.AppendLine("using System.Linq;");
sb.AppendLine();
sb.AppendLine("namespace AnyOfTypes");
sb.AppendLine("{");
sb.AppendLine(" // Code is based on https://github.com/Informatievlaanderen/hashcode-calculator");
var method = @" internal static class HashCodeCalculator
{
public static int GetHashCode(IEnumerable<object> hashFieldValues)
{
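// FNV-1a style hash: 2166136261 is the 32-bit FNV offset basis and 16777619 the FNV prime.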
const int offset = unchecked((int)2166136261);
const int prime = 16777619;
static int HashCodeAggregator(int hashCode, object value) => value == null
? (hashCode ^ 0) * prime
: (hashCode ^ value.GetHashCode()) * prime;
return hashFieldValues.Aggregate(offset, HashCodeAggregator);
}
}";
sb.AppendLine(method);
sb.AppendLine("}");
string code = sb.ToString();
switch (options.Type)
{
case OutputType.Console:
Console.WriteLine(code);
break;
case OutputType.File:
File.WriteAllText(Path.Combine(options.Folder, filename), code);
break;
default:
context?.AddSource(filename, SourceText.From(code, Encoding.UTF8));
break;
}
}
private static void BuildAnyOfTypesEnumClass(OutputOptions options, GeneratorExecutionContext? context)
{
const string filename = "AnyOfTypes.g.cs";
var typeNames = GetTypeNames(Max);
var typesAsString = string.Join(", ", typeNames);
var sb = new StringBuilder();
sb.Append(AddHeader());
sb.AppendLine("namespace AnyOfTypes");
sb.AppendLine("{");
sb.AppendLine(" public enum AnyOfType");
sb.AppendLine(" {");
sb.AppendLine($" Undefined = 0, {typesAsString}");
sb.AppendLine(" }");
sb.AppendLine("}");
string code = sb.ToString();
switch (options.Type)
{
case OutputType.Console:
Console.WriteLine(code);
break;
case OutputType.File:
File.WriteAllText(Path.Combine(options.Folder, filename), code);
break;
default:
context?.AddSource(filename, SourceText.From(code, Encoding.UTF8));
break;
}
}
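// NOTE: BuildBaseClass is currently unused (its call in Generate is commented out).
// As written it would also emit invalid C#: struct fields and members cannot be 'virtual'.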
private static void BuildBaseClass(OutputOptions options, GeneratorExecutionContext? context)
{
var nullable = options.SupportsNullable ? "?" : string.Empty;
var sb = new StringBuilder();
sb.Append(AddHeader());
if (options.SupportsNullable)
{
sb.AppendLine("#nullable enable");
}
sb.AppendLine("using System;");
sb.AppendLine("using System.Diagnostics;");
sb.AppendLine("using System.Collections.Generic;");
sb.AppendLine();
sb.AppendLine("namespace AnyOfTypes");
sb.AppendLine("{");
sb.AppendLine($" public struct AnyOfBase");
sb.AppendLine(" {");
sb.AppendLine(" public virtual int NumberOfTypes { get; private set; }");
sb.AppendLine($" private virtual object{nullable} _currentValue;");
sb.AppendLine(" private virtual readonly Type _currentValueType;");
sb.AppendLine(" private virtual readonly AnyOfType _currentType;");
sb.AppendLine();
AddProperty(sb, "AnyOfType", "CurrentType", "_currentType");
AddProperty(sb, $"object{nullable}", "CurrentValue", "_currentValue");
AddProperty(sb, "Type", "CurrentValueType", "_currentValueType");
sb.AppendLine(" }");
sb.AppendLine("}");
if (options.SupportsNullable)
{
sb.AppendLine("#nullable disable");
}
var code = sb.ToString();
var filename = $"AnyOfBase.g.cs";
switch (options.Type)
{
case OutputType.Console:
Console.WriteLine(code);
break;
case OutputType.File:
File.WriteAllText(Path.Combine(options.Folder, filename), code);
break;
default:
context?.AddSource(filename, SourceText.From(code, Encoding.UTF8));
break;
}
}
private static void BuildTxClass(OutputOptions options, GeneratorExecutionContext? context, int numberOfTypes)
{
var typeNames = GetTypeNames(numberOfTypes);
var genericTypesAsCommaSeparatedString = string.Join(", ", typeNames.Select(t => $"T{t}"));
var typesAsCommaSeparatedString = $"{string.Join(", ", typeNames.Select(t => $"typeof(T{t})"))}";
var thisType = $"AnyOf<{string.Join(", ", typeNames.Select(t => $"{{typeof(T{t}).Name}}"))}>";
var nullable = options.SupportsNullable ? "?" : string.Empty;
var @default = options.SupportsNullable ? "!" : string.Empty;
var sb = new StringBuilder();
sb.Append(AddHeader());
if (options.SupportsNullable)
{
sb.AppendLine("#nullable enable");
}
sb.AppendLine("using System;");
sb.AppendLine("using System.Diagnostics;");
sb.AppendLine("using System.Collections.Generic;");
sb.AppendLine();
sb.AppendLine("namespace AnyOfTypes");
sb.AppendLine("{");
sb.AppendLine(" [DebuggerDisplay(\"{_thisType}, AnyOfType = {_currentType}; Type = {_currentValueType?.Name}; Value = '{ToString()}'\")]");
sb.AppendLine($" public struct AnyOf<{genericTypesAsCommaSeparatedString}>");
sb.AppendLine(" {");
sb.AppendLine($" private readonly string _thisType => $\"{thisType}\";");
sb.AppendLine(" private readonly int _numberOfTypes;");
sb.AppendLine($" private readonly object{nullable} _currentValue;");
sb.AppendLine(" private readonly Type _currentValueType;");
sb.AppendLine(" private readonly AnyOfType _currentType;");
sb.AppendLine();
Array.ForEach(typeNames, t => sb.AppendLine($" private readonly T{t} _{t.ToLowerInvariant()};"));
sb.AppendLine();
sb.AppendLine($" public readonly AnyOfType[] AnyOfTypes => new [] {{ {string.Join(", ", typeNames.Select(t => $"AnyOfType.{t}"))} }};");
sb.AppendLine($" public readonly Type[] Types => new [] {{ {typesAsCommaSeparatedString} }};");
sb.AppendLine(" public bool IsUndefined => _currentType == AnyOfType.Undefined;");
Array.ForEach(typeNames, t => sb.AppendLine($" public bool Is{t} => _currentType == AnyOfType.{t};"));
sb.AppendLine();
Array.ForEach(typeNames, t =>
{
sb.AppendLine($" public static implicit operator AnyOf<{genericTypesAsCommaSeparatedString}>(T{t} value) => new AnyOf<{genericTypesAsCommaSeparatedString}>(value);");
sb.AppendLine();
sb.AppendLine($" public static implicit operator T{t}(AnyOf<{genericTypesAsCommaSeparatedString}> @this) => @this.{t};");
sb.AppendLine();
sb.AppendLine($" public AnyOf(T{t} value)");
sb.AppendLine(" {");
sb.AppendLine($" _numberOfTypes = {numberOfTypes};");
sb.AppendLine($" _currentType = AnyOfType.{t};");
sb.AppendLine($" _currentValue = value;");
sb.AppendLine($" _currentValueType = typeof(T{t});");
sb.AppendLine($" _{t.ToLowerInvariant()} = value;");
Array.ForEach(typeNames.Except(new[] { t }).ToArray(), dt => sb.AppendLine($" _{dt.ToLowerInvariant()} = default{@default};"));
sb.AppendLine(" }");
sb.AppendLine();
sb.AppendLine($" public T{t} {t}");
sb.AppendLine(" {");
sb.AppendLine(" get");
sb.AppendLine(" {");
sb.AppendLine($" Validate(AnyOfType.{t});");
sb.AppendLine($" return _{t.ToLowerInvariant()};");
sb.AppendLine(" }");
sb.AppendLine(" }");
sb.AppendLine();
});
sb.AppendLine(" private void Validate(AnyOfType desiredType)");
sb.AppendLine(" {");
sb.AppendLine(" if (desiredType != _currentType)");
sb.AppendLine(" {");
sb.AppendLine(" throw new InvalidOperationException($\"Attempting to get {desiredType} when {_currentType} is set\");");
sb.AppendLine(" }");
sb.AppendLine(" }");
sb.AppendLine();
AddProperty(sb, "AnyOfType", "CurrentType", "_currentType");
AddProperty(sb, $"object{nullable}", "CurrentValue", "_currentValue");
AddProperty(sb, "Type", "CurrentValueType", "_currentValueType");
sb.AppendLine(" public override int GetHashCode()");
sb.AppendLine(" {");
sb.AppendLine(" var fields = new object[]");
sb.AppendLine(" {");
sb.AppendLine(" _numberOfTypes,");
sb.AppendLine(" _currentValue,");
sb.AppendLine(" _currentType,");
Array.ForEach(typeNames, t => sb.AppendLine($" _{t.ToLowerInvariant()},"));
Array.ForEach(typeNames, t => sb.AppendLine($" typeof(T{t}),"));
sb.AppendLine(" };");
sb.AppendLine(" return HashCodeCalculator.GetHashCode(fields);");
sb.AppendLine(" }");
sb.AppendLine();
sb.AppendLine($" private bool Equals(AnyOf<{genericTypesAsCommaSeparatedString}> other)");
sb.AppendLine(" {");
sb.AppendLine(" return _currentType == other._currentType &&");
sb.AppendLine(" _numberOfTypes == other._numberOfTypes &&");
sb.AppendLine($" EqualityComparer<object{nullable}>.Default.Equals(_currentValue, other._currentValue) &&");
Array.ForEach(typeNames, t => sb.AppendLine($" EqualityComparer<T{t}>.Default.Equals(_{t.ToLowerInvariant()}, other._{t.ToLowerInvariant()}){(t == typeNames.Last() ? ";" : " &&")}"));
sb.AppendLine(" }");
sb.AppendLine();
sb.AppendLine($" public static bool operator ==(AnyOf<{genericTypesAsCommaSeparatedString}> obj1, AnyOf<{genericTypesAsCommaSeparatedString}> obj2)");
sb.AppendLine(" {");
sb.AppendLine($" return obj1.Equals(obj2);");
sb.AppendLine(" }");
sb.AppendLine();
sb.AppendLine($" public static bool operator !=(AnyOf<{genericTypesAsCommaSeparatedString}> obj1, AnyOf<{genericTypesAsCommaSeparatedString}> obj2)");
sb.AppendLine(" {");
sb.AppendLine($" return !obj1.Equals(obj2);");
sb.AppendLine(" }");
sb.AppendLine();
sb.AppendLine($" public override bool Equals(object{nullable} obj)");
sb.AppendLine(" {");
sb.AppendLine($" return obj is AnyOf<{genericTypesAsCommaSeparatedString}> o && Equals(o);");
sb.AppendLine(" }");
sb.AppendLine();
sb.AppendLine($" public override string{nullable} ToString()");
sb.AppendLine(" {");
sb.AppendLine(" return IsUndefined ? null : $\"{_currentValue}\";");
sb.AppendLine(" }");
sb.AppendLine(" }");
sb.AppendLine("}");
if (options.SupportsNullable)
{
sb.AppendLine("#nullable disable");
}
var code = sb.ToString();
var filename = $"AnyOf_{numberOfTypes}.g.cs";
switch (options.Type)
{
case OutputType.Console:
Console.WriteLine(code);
break;
case OutputType.File:
File.WriteAllText(Path.Combine(options.Folder, filename), code);
break;
default:
context?.AddSource(filename, SourceText.From(code, Encoding.UTF8));
break;
}
}
private static void AddProperty(StringBuilder src, string type, string name, string privateField)
{
src.AppendLine($" public {type} {name}");
src.AppendLine(" {");
src.AppendLine(" get");
src.AppendLine(" {");
src.AppendLine($" return {privateField};");
src.AppendLine(" }");
src.AppendLine(" }");
src.AppendLine();
}
private static StringBuilder AddHeader()
{
var sb = new StringBuilder();
sb.AppendLine("//------------------------------------------------------------------------------");
sb.AppendLine("// <auto-generated>");
sb.AppendLine("// This code was generated by https://github.com/StefH/AnyOf.");
sb.AppendLine("//");
sb.AppendLine("// Changes to this file may cause incorrect behavior and will be lost if");
sb.AppendLine("// the code is regenerated.");
sb.AppendLine("// </auto-generated>");
sb.AppendLine("//------------------------------------------------------------------------------");
sb.AppendLine();
return sb;
}
}
}
<|start_filename|>tests/AnyOf.System.Text.Json.Tests/TestModels/TestModels.cs<|end_filename|>
using System.Collections.Generic;
using AnyOfTypes;
namespace AnyOf.System.Text.Json.Tests.TestModels
{
public class TestComplexTypes
{
public AnyOf<A, B> AorB { get; set; }
}
public class TestSimpleTypes
{
public AnyOf<int, string> IntOrString { get; set; }
}
public class TestMixedTypes
{
public AnyOf<int, string, A, B> IntOrStringOrAOrB { get; set; }
}
public class TestComplexArray
{
public AnyOf<int[], List<string>, List<A>, IEnumerable<B>> X { get; set; }
}
public class A
{
public int Id { get; set; }
}
public class B
{
public string Guid { get; set; }
}
} | StefH/AnyOf |
<|start_filename|>Assets/Scripts/Editor/src/Tilemap3DEditor.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using SardineFish.Utils;
using SardineFish.Utils.Editor;
using UnityEditor;
using UnityEngine;
using WFC.Test;
using WFC.Tilemap3D;
namespace WFC.Editor
{
public class Tilemap3DEditor : EditorWindow
{
[SerializeField] private Texture2D _iconCursor;
[SerializeField] private Texture2D _iconBrush;
[SerializeField] private Texture2D _iconColorPicker;
[SerializeField] private Texture2D _iconEraser;
enum EditMode
{
None,
Paint,
Pick,
Erase,
}
private static EditMode[] ToolsMode = new[]
{
EditMode.None,
EditMode.Paint,
EditMode.Pick,
EditMode.Erase,
};
private EditMode _editMode = EditMode.None;
private List<Tilemap3D.GameObjectTilemap> _tilemaps = new List<Tilemap3D.GameObjectTilemap>();
private Tilemap3D.GameObjectTilemap _palette;
private Tilemap3D.GameObjectTilemap _editingGameObjectTilemap;
private int _controlID;
private bool _shouldReload = true;
private GameObjectTile _selectedTile;
[MenuItem("Window/Tilemap 3D")]
private static void ShowWindow()
{
var window = GetWindow<Tilemap3DEditor>();
window.titleContent = new GUIContent("Tilemap 3D Editor");
window.Show();
}
private void OnGUI()
{
EditorUtils.Horizontal(() =>
{
var toolIdx = ToolsMode.IndexOf(_editMode);
toolIdx = GUILayout.SelectionGrid(toolIdx, new GUIContent[]
{
EditorGUIUtility.IconContent("Grid.Default"),
EditorGUIUtility.IconContent("Grid.PaintTool"),
EditorGUIUtility.IconContent("Grid.PickingTool"),
EditorGUIUtility.IconContent("Grid.EraserTool"),
}, 4);
_editMode = ToolsMode[toolIdx];
});
var selected = _palette ? _tilemaps.IndexOf(_palette) : -1;
// var idx = EditorGUILayout.DropdownButton(new GUIContent(selected), FocusType.Keyboard,
// _tilemaps.Select(t => new GUIContent(t.name)).ToArray());
selected = EditorGUILayout.Popup("Palette", selected, _tilemaps.Select(t => t.name).ToArray());
if (selected >= 0)
_palette = _tilemaps[selected];
else
_palette = null;
selected = _editingGameObjectTilemap ? _tilemaps.IndexOf(_editingGameObjectTilemap) : -1;
selected = EditorGUILayout.Popup("Tilemap", selected, _tilemaps.Select(t => t.name).ToArray());
if (selected >= 0)
_editingGameObjectTilemap = _tilemaps[selected];
else
_editingGameObjectTilemap = null;
if (GUILayout.Button("Refresh"))
{
Refresh();
}
EditorGUILayout.LabelField("Selected Tile", _selectedTile ? _selectedTile.name : "<None>");
}
private void OnEnable()
{
SceneView.duringSceneGui += OnScene;
AssemblyReloadEvents.afterAssemblyReload += AssemblyReload;
EditorApplication.playModeStateChanged += PlayModeChanged;
_tilemaps = GameObject.FindObjectsOfType<Tilemap3D.GameObjectTilemap>().ToList();
_controlID = GUIUtility.GetControlID(FocusType.Passive);
}
private void OnDisable()
{
SceneView.duringSceneGui -= OnScene;
AssemblyReloadEvents.afterAssemblyReload -= AssemblyReload;
EditorApplication.playModeStateChanged -= PlayModeChanged;
}
void Refresh()
{
_tilemaps = GameObject.FindObjectsOfType<Tilemap3D.GameObjectTilemap>().ToList();
if (!Application.isPlaying)
{
foreach (var tilemap in _tilemaps)
{
tilemap.ReloadTileFromChildren();
}
}
}
private void PlayModeChanged(PlayModeStateChange obj)
{
Refresh();
}
private void AssemblyReload()
{
// _shouldReload = true;
foreach (var tilemap in _tilemaps)
{
tilemap.ReloadTileFromChildren();
}
}
private void OnScene(SceneView obj)
{
if (_selectedTile)
{
Handles.color = Color.cyan;
Handles.DrawWireCube(_selectedTile.Position + (Vector3.one / 2), Vector3.one);
}
if (_editMode == EditMode.None)
return;
HandleUtility.AddDefaultControl(GUIUtility.GetControlID(FocusType.Passive));
var ev = Event.current;
if (!(ev.type == EventType.MouseDown && ev.button == 0))
return;
if (_editMode == EditMode.Pick && _palette)
{
var ray = HandleUtility.GUIPointToWorldRay(ev.mousePosition);
_selectedTile = _palette.RayMarch(ray, 100);
_editMode = EditMode.Paint;
ev.Use();
}
else if (_editMode == EditMode.Paint && _editingGameObjectTilemap && _selectedTile)
{
var ray = HandleUtility.GUIPointToWorldRay(ev.mousePosition);
var tile = _editingGameObjectTilemap.RayMarch(ray, 100, out var hitPos, out var normal);
Vector3Int pos;
if (tile)
pos = hitPos + normal;
else
{
pos = (ray.origin + ray.direction * (-ray.origin.y / ray.direction.y)).FloorToVector3Int();
}
_editingGameObjectTilemap.SetTile(pos, _selectedTile);
}
else if (_editMode == EditMode.Erase && _editingGameObjectTilemap)
{
var ray = HandleUtility.GUIPointToWorldRay(ev.mousePosition);
var tile = _editingGameObjectTilemap.RayMarch(ray, 100, out var hitPos, out _);
if (tile)
{
_editingGameObjectTilemap.RemoveTile(hitPos);
}
}
}
}
}
<|start_filename|>Assets/Scripts/WFC/WFCGenerator.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using SardineFish.Utils;
using UnityEngine;
using Random = System.Random;
namespace WFC
{
public class WFCGenerator<T>
{
private const float EntropyBias = 0.01f;
public Vector3Int Size { get; private set; }
public readonly HashSet<Pattern<T>> Patterns = new HashSet<Pattern<T>>();
public ChunkState<Pattern<T>>[,,] ChunkStates;
private Random _random;
private readonly Stack<Vector3Int> _propagationStack = new Stack<Vector3Int>();
public Vector3Int[] NeighborOffset { get; private set; }
// private static readonly Vector2Int[] AdjacentDelta = new[]
// {
// Vector2Int.right,
// Vector2Int.up,
// Vector2Int.left,
// Vector2Int.down,
// };
public WFCGenerator(Vector3Int size, Vector3Int[] neighborOffset, IEnumerable<Pattern<T>> patterns)
{
Size = size;
Patterns = new HashSet<Pattern<T>>(patterns);
ChunkStates = new ChunkState<Pattern<T>>[size.x, size.y, size.z];
NeighborOffset = neighborOffset;
_random = new Random();
}
public void Resize(Vector3Int size)
{
Size = size;
ChunkStates = new ChunkState<Pattern<T>>[size.x, size.y, size.z];
}
public void Reset(int seed)
{
_propagationStack.Clear();
_random = new Random(seed);
for (var x = 0; x < Size.x; x++)
for (var y = 0; y < Size.y; y++)
for (var z = 0; z < Size.z; z++)
{
ChunkStates[x, y, z] = new ChunkState<Pattern<T>>(Patterns, NeighborOffset.Length);
}
}
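/// <summary>
/// Constrain the chunks on the outer boundary so that they only keep patterns
/// compatible with <paramref name="boundPattern"/> lying just outside the bounds,
/// then propagate the resulting restrictions inwards.
/// </summary>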
public void SetBoundaryPattern(Pattern<T> boundPattern)
{
var bounds = new BoundsInt(Vector3Int.zero, Size);
for (var idx = 0; idx < NeighborOffset.Length; idx++)
{
var offset = NeighborOffset[idx];
var contraryIdx = NeighborOffset.IndexOf(-offset);
foreach (var pos in bounds.BoundaryIter(offset))
{
var chunk = ChunkStates[pos.x, pos.y, pos.z];
var count = chunk.Compatibles.Count;
// chunk.Compatibles.RemoveWhere(p => !p.Neighbors[idx].Contains(boundPattern));
chunk.UpdateCompatibleFrom(boundPattern.Neighbors[contraryIdx]);
if(count != chunk.Compatibles.Count)
_propagationStack.Push(pos);
}
}
CoroutineRunner.Run(PropagateProgressive().GetEnumerator());
}
public IEnumerable<Vector3Int> RunProgressive()
{
while (true)
{
if (!Observe(out var chunkPos))
yield break;
// PropagateProgressive();
CoroutineRunner.Run(PropagateProgressive().GetEnumerator());
// foreach (var t in PropagateProgressive())
// {
// yield return chunkPos;
// }
yield return chunkPos;
}
}
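/// <summary>
/// Pick the non-definite chunk with the lowest entropy (with a small random bias to
/// break ties) and collapse it to one of its compatible patterns, chosen randomly in
/// proportion to the pattern weights. Returns false once every chunk is definite.
/// </summary>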
bool Observe(out Vector3Int chunkPos)
{
var minChunk = -Vector3Int.one;
var minEntropy = float.MaxValue;
for (var x = 0; x < Size.x; x++)
for (var y = 0; y < Size.y; y++)
for (var z = 0; z < Size.z; z++)
{
var chunk = ChunkStates[x, y, z];
if (chunk.Definite)
continue;
// Add a small random bias so ties between multiple minimum-entropy chunks are broken randomly
float bias = EntropyBias * (float) _random.NextDouble();
if (chunk.Entropy + bias < minEntropy)
{
minChunk = new Vector3Int(x, y, z);
minEntropy = chunk.Entropy + bias;
}
}
chunkPos = default;
if (minChunk.x < 0)
return false;
var observeChunk = ChunkStates[minChunk.x, minChunk.y, minChunk.z];
var pattern = observeChunk.Compatibles
.RandomTake((float) _random.NextDouble(), ptn => ptn.Weight);
observeChunk.CollapseTo(pattern);
ChunkStates[minChunk.x, minChunk.y, minChunk.z] = observeChunk;
_propagationStack.Push(minChunk);
chunkPos = minChunk;
return true;
}
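/// <summary>
/// Standard WFC constraint propagation: repeatedly pop a changed chunk and narrow each
/// neighbour's compatible set to the patterns allowed next to the popped chunk,
/// re-queueing any neighbour that changed, until a fixed point is reached.
/// </summary>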
IEnumerable PropagateProgressive()
{
while (_propagationStack.Count > 0)
{
var changedPos = _propagationStack.Pop();
var chunk = ChunkStates[changedPos.x, changedPos.y, changedPos.z];
for(var dir = 0; dir < NeighborOffset.Length; dir++)
{
var delta = NeighborOffset[dir];
var adjacentPos = changedPos + delta;
if (!Size.Contains(adjacentPos))
continue;
var adjacent = ChunkStates[adjacentPos.x, adjacentPos.y, adjacentPos.z];
if (adjacent.UpdateCompatibleFrom(chunk.CompatibleAdjacent[dir]))
{
_propagationStack.Push(adjacentPos);
ChunkStates[adjacentPos.x, adjacentPos.y, adjacentPos.z] = adjacent;
}
}
yield return null;
}
}
}
}
<|start_filename|>Assets/Scripts/Tilemap3D/GameObjectTilemap.cs<|end_filename|>
using System;
using System.Collections.Generic;
using SardineFish.Utils;
using UnityEngine;
using UnityEngine.Tilemaps;
namespace WFC.Tilemap3D
{
public class GameObjectTilemap : MonoBehaviour, ICustomEditorEX
{
public int ChunkSize = 8;
private readonly Dictionary<Vector3Int, TileChunk> Chunks = new Dictionary<Vector3Int, TileChunk>();
[DisplayInInspector("Total tiles")]
public int Count { get; private set; }
public BoundsInt Bounds { get; private set; }
private void Awake()
{
ReloadTileFromChildren();
}
[EditorButton]
public void ReloadTileFromChildren()
{
Chunks.Clear();
Count = 0;
var tiles = GetComponentsInChildren<GameObjectTile>();
foreach (var tile in tiles)
{
var pos = tile.transform.localPosition.FloorToVector3Int();
var (chunkPos, offset) = ChunkAt(pos);
tile.SetPosInternal(chunkPos, offset, pos);
SetTileInstance(pos, tile);
}
}
[EditorButton]
void ClearAll()
{
ClearAllTiles();
}
public void SetTile(Vector3Int pos, GameObjectTile prefab)
{
if (!prefab)
{
RemoveTile(pos);
return;
}
var (chunkPos, offset) = ChunkAt(pos);
var tile = prefab.CreateInstance(chunkPos, offset, pos);
SetTileInstance(pos, tile);
}
void SetTileInstance(Vector3Int pos, GameObjectTile tile)
{
var (chunkPos, offset) = ChunkAt(pos);
var chunk = GetOrCreateChunk(chunkPos);
tile.transform.SetParent(transform, false);
tile.transform.localPosition = pos;
if (!chunk.SetTile(offset, tile))
{
Count++;
if (Count == 1)
Bounds = new BoundsInt(pos, Vector3Int.one);
else
Bounds = Bounds.Encapsulate(pos);
}
}
public void RemoveTile(Vector3Int pos)
{
var (chunkPos, offset) = ChunkAt(pos);
if (!Chunks.TryGetValue(chunkPos, out var chunk))
return;
var tile = chunk.RemoveTile(offset);
if (tile)
{
tile.DestroyInstance();
Count--;
}
}
public void ClearAllTiles()
{
foreach (var chunk in Chunks.Values)
{
foreach (var tile in chunk.TileList)
{
chunk.Tiles[tile.ChunkOffset.x, tile.ChunkOffset.y, tile.ChunkOffset.z] = null;
tile.DestroyInstance();
}
chunk.TileList.Clear();
}
Chunks.Clear();
Count = 0;
}
public GameObjectTile GetTile(Vector3Int pos)
{
var (chunkPos, offset) = ChunkAt(pos);
if (!Chunks.TryGetValue(chunkPos, out var chunk))
return null;
return chunk[offset];
}
public GameObjectTile RayMarch(Ray ray, int distance)
{
return RayMarch(ray, distance, out _, out _);
}
public GameObjectTile RayMarch(Ray ray, int distance, out Vector3Int hitPos, out Vector3Int hitNormal)
{
foreach (var (pos, normal) in Utility.VoxelRayMarching(ray, distance))
{
var tile = GetTile(pos);
if (tile)
{
hitNormal = normal;
hitPos = pos;
return tile;
}
}
hitNormal = Vector3Int.zero;
hitPos = Vector3Int.zero;
return null;
}
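// Maps a world tile position to a (chunk index, in-chunk offset) pair using floor
// semantics, so negative coordinates resolve correctly: with ChunkSize = 8,
// pos.x = -1 -> chunk -1 / offset 7, and pos.x = -8 -> chunk -1 / offset 0.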
(Vector3Int chunkPos, Vector3Int offset) ChunkAt(Vector3Int pos)
{
return (new Vector3Int(
FloorDiv(pos.x, ChunkSize),
FloorDiv(pos.y, ChunkSize),
FloorDiv(pos.z, ChunkSize)
), new Vector3Int(
FloorRemainder(pos.x, ChunkSize),
FloorRemainder(pos.y, ChunkSize),
FloorRemainder(pos.z, ChunkSize)
));
}
static int FloorDiv(int x, int m) =>
x >= 0
? x / m
: (x - m + 1) / m;
static int FloorRemainder(int x, int m) =>
x >= 0
? x % m
: (m + x % m) % m;
TileChunk GetOrCreateChunk(Vector3Int chunkPos)
{
if (Chunks.TryGetValue(chunkPos, out var chunk))
return chunk;
var newChunk = new TileChunk(ChunkSize);
Chunks[chunkPos] = newChunk;
return newChunk;
}
private void OnDrawGizmosSelected()
{
Gizmos.DrawWireCube(Bounds.center, Bounds.size);
}
}
}
<|start_filename|>Assets/Scripts/WFC/ChunkState.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
namespace WFC
{
public struct ChunkState<TPattern> where TPattern : IPattern<TPattern>
{
public HashSet<TPattern> Compatibles;
public HashSet<TPattern>[] CompatibleAdjacent;
public float Entropy;
public bool Definite;
private TPattern _pattern;
/// <summary>
/// Get the collapsed pattern.
/// </summary>
/// <exception cref="Exception">Thrown when the chunk has not collapsed to a definite pattern.</exception>
public TPattern Pattern
{
get
{
if (!Definite)
throw new Exception("Chunk not determined.");
return _pattern;
}
}
/// <summary>
/// Construct the chunk state with all possible patterns.
/// </summary>
/// <param name="patterns">All patterns this chunk can still collapse to.</param>
/// <param name="adjacentCount">Number of adjacency directions.</param>
public ChunkState(IEnumerable<TPattern> patterns, int adjacentCount)
{
Definite = false;
Compatibles = new HashSet<TPattern>(patterns);
CompatibleAdjacent = new HashSet<TPattern>[adjacentCount];
for (var i = 0; i < adjacentCount; i++)
CompatibleAdjacent[i] = new HashSet<TPattern>();
Entropy = 0;
_pattern = default;
UpdateEntropy();
UpdateAdjacent();
}
/// <summary>
/// Remove a possible pattern in this chunk and update the entropy.
/// The return value indicates whether this chunk changed.
/// </summary>
/// <param name="pattern">The pattern to ban.</param>
/// <returns>True if the pattern was present and removed.</returns>
public bool Ban(TPattern pattern)
{
if (Compatibles.Remove(pattern))
{
UpdateEntropy();
return true;
}
return false;
}
/// <summary>
/// Update the compatible patterns of this chunk from the given pattern set.
/// The return value indicates whether the compatible patterns changed.
/// The compatible adjacent sets are also updated if necessary.
/// </summary>
/// <param name="patterns"></param>
/// <returns></returns>
public bool UpdateCompatibleFrom(IEnumerable<TPattern> patterns)
{
if (!Compatibles.IsSubsetOf(patterns))
{
Compatibles.IntersectWith(patterns);
UpdateAdjacent();
UpdateEntropy();
return true;
}
return false;
}
/// <summary>
/// Collapse this chunk to a specific pattern; the entropy is set to zero.
/// </summary>
/// <param name="pattern">The pattern this chunk collapses to.</param>
public void CollapseTo(TPattern pattern)
{
Compatibles.Clear();
Compatibles.Add(pattern);
_pattern = pattern;
Entropy = 0;
Definite = true;
UpdateAdjacent();
}
/// <summary>
/// Force update the compatible adjacent patterns of this chunk.
/// </summary>
void UpdateAdjacent()
{
for (var i = 0; i < CompatibleAdjacent.Length; i++)
{
CompatibleAdjacent[i].Clear();
foreach (var pattern in Compatibles)
{
CompatibleAdjacent[i].UnionWith(pattern.GetAdjacent(i));
}
}
}
/// <summary>
/// Force update the entropy of this chunk.
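/// Computed as the Shannon entropy of the normalized weight distribution:
/// H = log(sum(W)) - sum(W * log(W)) / sum(W).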
/// </summary>
void UpdateEntropy()
{
var sumOfWeight = Compatibles.Sum(pattern => pattern.Weight);
var sumOfWeightLogWeight = Compatibles.Sum(pattern => pattern.Weight * Mathf.Log(pattern.Weight));
Entropy = Mathf.Log(sumOfWeight) - sumOfWeightLogWeight / sumOfWeight;
}
}
}
<|start_filename|>Assets/Scripts/Utils/Editor/EditorButton.cs<|end_filename|>
using UnityEngine;
using System.Collections;
using System.Reflection;
using UnityEditor;
using SardineFish.Utils;
namespace SardineFish.Utils.Editor
{
[CustomAttributeEditor(typeof(EditorButtonAttribute))]
public class ButtonEditor : AttributeEditor
{
public override void OnEdit(MemberInfo member, CustomEditorAttribute attr)
{
var method = member as MethodInfo;
var buttonAttr = attr as EditorButtonAttribute;
if (method is null)
return;
var label = buttonAttr.Label == "" ? member.Name : buttonAttr.Label;
if (GUILayout.Button(label))
method.Invoke(target, null);
}
}
}
<|start_filename|>Assets/Scripts/WFC/TileWeight.cs<|end_filename|>
using UnityEngine;
namespace WFC
{
public class TileWeight : MonoBehaviour
{
public float Weight;
}
}
<|start_filename|>Assets/Scripts/WFC/Tilemap3DPattern.cs<|end_filename|>
using System;
using System.Collections.Generic;
using JetBrains.Annotations;
using SardineFish.Utils;
using UnityEngine;
using WFC.Tilemap3D;
namespace WFC
{
[RequireComponent(typeof(Tilemap3D.GameObjectTilemap))]
public class Tilemap3DPattern : MonoBehaviour, ICustomEditorEX
{
public bool IncludeEmptyTile = true;
public GameObjectTile BoundaryTile;
private GameObjectTilemap _tilemap;
private readonly Dictionary<GameObjectTile, Pattern<GameObjectTile>> _patterns =
new Dictionary<GameObjectTile, Pattern<GameObjectTile>>();
public List<Pattern<GameObjectTile>> Patterns { get; } = new List<Pattern<GameObjectTile>>();
private Pattern<GameObjectTile> _boundaryPattern;
public Pattern<GameObjectTile> BoundaryPattern
{
get
{
if (_boundaryPattern.NotNull())
return _boundaryPattern;
if (!BoundaryTile)
return null;
_boundaryPattern = new Pattern<GameObjectTile>(BoundaryTile, NeighborOffset.Length);
return _boundaryPattern;
}
}
[DisplayInInspector()]
private int PatternCount => _patterns.Count;
public Vector3Int[] NeighborOffset => AdjacentOffset;
private static readonly Vector3Int[] AdjacentOffset = new[]
{
Vector3Int.left, Vector3Int.right,
Vector3Int.down, Vector3Int.up,
new Vector3Int(0, 0, -1), new Vector3Int(0, 0, 1),
};
private void Awake()
{
_tilemap = GetComponent<GameObjectTilemap>();
}
private Pattern<GameObjectTile> _emptyPattern;
Pattern<GameObjectTile> GetOrCreateEmptyPattern()
{
if (_emptyPattern is null)
{
_emptyPattern = new Pattern<GameObjectTile>(null, NeighborOffset.Length);
for (var idx = 0; idx < AdjacentOffset.Length; idx++)
{
_emptyPattern.Neighbors[idx].Add(_emptyPattern);
}
}
return _emptyPattern;
}
Pattern<GameObjectTile> GetOrCreatePattern(GameObjectTile tile)
{
if (_patterns.TryGetValue(tile.Prefab, out var pattern))
return pattern;
if (tile.Prefab == BoundaryTile)
return BoundaryPattern;
var weight = 1f;
if (tile.Prefab.GetComponent<TileWeight>() is TileWeight tileWeight && tileWeight)
weight = tileWeight.Weight;
var newPattern = new Pattern<GameObjectTile>(tile.Prefab, NeighborOffset.Length, weight);
_patterns.Add(tile.Prefab, newPattern);
return newPattern;
}
public void ExtractPatterns()
{
if (!_tilemap)
_tilemap = GetComponent<GameObjectTilemap>();
_patterns.Clear();
Patterns.Clear();
foreach (var pos in _tilemap.Bounds.allPositionsWithin)
{
var tile = _tilemap.GetTile(pos);
if(!tile)
continue;
var pattern = GetOrCreatePattern(tile);
for (var idx = 0; idx < NeighborOffset.Length; idx++)
{
var neighborPos = pos + NeighborOffset[idx];
var neighborTile = _tilemap.GetTile(neighborPos);
if (!neighborTile)
{
if (IncludeEmptyTile)
{
var emptyPattern = GetOrCreateEmptyPattern();
pattern.Neighbors[idx].Add(emptyPattern);
emptyPattern.Neighbors[NeighborOffset.IndexOf(-NeighborOffset[idx])].Add(pattern);
}
continue;
}
var neighborPattern = GetOrCreatePattern(neighborTile);
pattern.Neighbors[idx].Add(neighborPattern);
}
}
foreach(var pattern in _patterns.Values)
Patterns.Add(pattern);
if (IncludeEmptyTile)
Patterns.Add(GetOrCreateEmptyPattern());
}
}
}
<|start_filename|>Assets/Scripts/WFC/TilemapPattern.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using SardineFish.Utils;
using UnityEngine;
using UnityEngine.Tilemaps;
using SardineFish.Utils;
namespace WFC
{
[RequireComponent(typeof(Tilemap))]
public class TilemapPattern : MonoBehaviour, ICustomEditorEX
{
public bool IncludeCorner = false;
private readonly Dictionary<TileBase, Pattern<TileBase>> _patterns =
new Dictionary<TileBase, Pattern<TileBase>>();
private static readonly Vector3Int[] Adjacent = new[]
{
Vector3Int.right,
Vector3Int.up,
Vector3Int.left,
Vector3Int.down,
};
private static readonly Vector3Int[] AdjacentWithCorner = new[]
{
Vector3Int.right,
Vector3Int.right + Vector3Int.up,
Vector3Int.up,
Vector3Int.up + Vector3Int.left,
Vector3Int.left,
Vector3Int.left + Vector3Int.down,
Vector3Int.down,
Vector3Int.down + Vector3Int.right,
};
public Vector3Int[] NeighborOffset => IncludeCorner ? AdjacentWithCorner : Adjacent;
public IEnumerable<Pattern<TileBase>> Patterns => _patterns.Values;
private Tilemap _tilemap;
private void Awake()
{
_tilemap = GetComponent<Tilemap>();
ExtractPatterns();
}
Pattern<TileBase> GetOrCreatePattern(TileBase tile)
{
if (_patterns.TryGetValue(tile, out var pattern))
return pattern;
Pattern<TileBase> newPattern =
IncludeCorner
? new Pattern<TileBase>(tile, 8)
: new Pattern<TileBase>(tile, 4);
_patterns.Add(tile, newPattern);
return newPattern;
}
[EditorButton]
public void ExtractPatterns()
{
_patterns.Clear();
var bounds = _tilemap.cellBounds;
var up = Vector2Int.up.ToVector3Int();
var left = Vector2Int.left.ToVector3Int();
var right = Vector2Int.right.ToVector3Int();
var down = Vector2Int.down.ToVector3Int();
foreach (var pos in bounds.Iter())
{
var tile = _tilemap.GetTile(pos);
if (!tile)
continue;
var pattern = GetOrCreatePattern(tile);
for (var idx = 0; idx < NeighborOffset.Length; idx++)
{
if (_tilemap.GetTile(pos + NeighborOffset[idx]) is TileBase adjacentTile)
pattern.Neighbors[idx].Add(GetOrCreatePattern(adjacentTile));
}
}
}
}
}
<|start_filename|>Assets/Scripts/Test/TestRayMarching.cs<|end_filename|>
using System;
using SardineFish.Utils;
using UnityEngine;
namespace WFC.Test
{
[ExecuteInEditMode]
public class TestRayMarching : MonoBehaviour
{
public Transform From;
public Transform To;
private void Update()
{
}
private void OnDrawGizmos()
{
if(!From || !To)
return;
var dir = To.position - From.position;
var ray = new Ray(From.position, dir);
var distance = Mathf.Abs(Mathf.CeilToInt(dir.x)) + Mathf.Abs(Mathf.CeilToInt(dir.y)) +
Mathf.Abs(Mathf.CeilToInt(dir.z));
Gizmos.DrawLine(From.position, To.position);
Gizmos.color = Color.cyan.WithAlpha(0.3f);
foreach (var (pos, normal) in Utility.VoxelRayMarching(ray, distance))
{
Gizmos.DrawCube(pos + (Vector3.one / 2), Vector3.one);
Gizmos.DrawRay(pos + (Vector3.one / 2), normal);
}
}
}
}
<|start_filename|>Assets/Scripts/Tilemap3D/TileChunk.cs<|end_filename|>
using System.Collections.Generic;
using UnityEngine;
namespace WFC.Tilemap3D
{
public class TileChunk
{
public readonly GameObjectTile[,,] Tiles;
public readonly List<GameObjectTile> TileList;
public TileChunk(int chunkSize)
{
Tiles = new GameObjectTile[chunkSize, chunkSize,chunkSize];
TileList = new List<GameObjectTile>(chunkSize * chunkSize * chunkSize);
}
public GameObjectTile this[int x, int y, int z]
{
get => Tiles[x, y, z];
// set => Tiles[x, y, z] = value;
}
public GameObjectTile this[Vector3Int pos]
{
get => Tiles[pos.x, pos.y, pos.z];
// set => Tiles[pos.x, pos.y, pos.z] = value;
}
public GameObjectTile SetTile(Vector3Int offset, GameObjectTile tile)
{
var oldTile = this[offset];
if (oldTile)
{
// Reuse the list slot of the tile being replaced.
tile.InChunkId = oldTile.InChunkId;
TileList[oldTile.InChunkId] = tile;
}
else
{
// Keep InChunkId in sync with the tile's index in TileList,
// so the swap-removal in RemoveTile stays consistent.
tile.InChunkId = TileList.Count;
TileList.Add(tile);
}
Tiles[offset.x, offset.y, offset.z] = tile;
return oldTile;
}
public GameObjectTile RemoveTile(Vector3Int offset)
{
var tile = this[offset];
if (tile)
{
if (TileList.Count > 1 && tile.InChunkId != TileList.Count - 1)
{
TileList[tile.InChunkId] = TileList[TileList.Count - 1];
TileList[tile.InChunkId].InChunkId = tile.InChunkId;
}
TileList.RemoveAt(TileList.Count - 1);
}
return tile;
}
}
}
<|start_filename|>Assets/Scripts/Utils/CoroutineRunner.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
namespace SardineFish.Utils
{
public class ParallelCoroutineRunner
{
private List<CoroutineRunner> runners = new List<CoroutineRunner>(16);
public bool Aborted { get; private set; } = false;
public bool Completed { get; private set; } = false;
public bool Running => !Aborted && !Completed;
public bool Tick()
{
if (Aborted)
return false;
var keepRunning = false;
for (var i = 0; i < runners.Count; i++)
{
if (!runners[i].Running)
continue;
keepRunning |= runners[i].Tick();
if (Aborted)
return false;
}
if (!keepRunning)
{
Completed = true;
return false;
}
return true;
}
public void Append(IEnumerator coroutine)
{
if (Completed || Aborted)
{
Debug.LogWarning("Attempt to add coroutine to terminated runner.");
return;
}
runners.Add(new CoroutineRunner(coroutine));
}
public void Abort()
{
if (!Completed)
{
Aborted = true;
foreach(var runner in runners)
runner.Abort();
}
}
}
public class CoroutineRunner
{
Stack<IEnumerator> runStack = new Stack<IEnumerator>();
private IEnumerator iterator;
private bool shouldPop = false;
public bool Aborted { get; private set; }
public bool Completed { get; private set; }
public bool Running => !Aborted && !Completed;
public event Action OnAbort;
public CoroutineRunner(IEnumerator coroutine)
{
runStack.Push(coroutine);
shouldPop = true;
}
public bool Tick()
{
if (Aborted)
{
Completed = false;
return false;
}
do
{
if (shouldPop)
{
iterator = runStack.Pop();
shouldPop = false;
}
for (var state = iterator.MoveNext(); state; state = iterator.MoveNext())
{
if (iterator.Current is null)
{
return true;
}
else if (iterator.Current is IEnumerator next)
{
runStack.Push(iterator);
runStack.Push(next);
shouldPop = true;
break;
}
}
shouldPop = true;
} while (runStack.Count > 0);
Completed = true;
Aborted = false;
return false;
}
public void Run()
{
while (Tick()) ;
}
public void Abort()
{
Aborted = true;
OnAbort?.Invoke();
}
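// Runs a coroutine to completion synchronously. Nested IEnumerator yields are
// executed depth-first via an explicit stack instead of recursion.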
public static void Run(IEnumerator coroutine)
{
Stack<IEnumerator> runStack = new Stack<IEnumerator>();
runStack.Push(coroutine);
while (runStack.Count > 0)
{
var iterator = runStack.Pop();
for (var state = iterator.MoveNext(); state; state = iterator.MoveNext())
{
if(iterator.Current is null)
continue;
else if (iterator.Current is IEnumerator next)
{
runStack.Push(iterator);
runStack.Push(next);
break;
}
}
}
}
public static IEnumerator RunProgressive(IEnumerator coroutine)
{
Stack<IEnumerator> runStack = new Stack<IEnumerator>();
runStack.Push(coroutine);
while (runStack.Count > 0)
{
var iterator = runStack.Pop();
for (var state = iterator.MoveNext(); state; state = iterator.MoveNext())
{
if (iterator.Current is null)
{
yield return null;
}
else if (iterator.Current is IEnumerator next)
{
runStack.Push(iterator);
runStack.Push(next);
break;
}
}
}
}
public static IEnumerator All(IEnumerable<IEnumerator> coroutines)
{
var list = coroutines.Select(RunProgressive).ToList();
bool keepRunning = true;
while (keepRunning)
{
keepRunning = false;
foreach (var coroutine in list)
{
keepRunning |= coroutine.MoveNext();
}
if (!keepRunning)
break;
yield return null;
}
}
}
}
<|start_filename|>Assets/Scripts/Utils/ObjectPool.cs<|end_filename|>
using System.Collections.Generic;
namespace SardineFish.Utils
{
public static class ObjectPool<T> where T : new()
{
private static Stack<T> pool = new Stack<T>();
public static T Get()
{
if (pool.Count > 0)
return pool.Pop();
return new T();
}
public static void Release(T obj)
=> pool.Push(obj);
}
}
<|start_filename|>Assets/Scripts/Tilemap3D/GameObjectTile.cs<|end_filename|>
using System;
using SardineFish.Utils;
using UnityEditor;
using UnityEngine;
namespace WFC.Tilemap3D
{
[ExecuteInEditMode]
[SelectionBase]
public class GameObjectTile : MonoBehaviour
{
public Vector3Int Chunk { get; private set; }
public Vector3Int ChunkOffset { get; private set; }
public Vector3Int Position { get; private set; }
public int InChunkId { get; set; }
[HideInInspector] [SerializeField] private GameObjectTile _prefab;
public GameObjectTile Prefab
{
get => _prefab;
private set => _prefab = value;
}
public GameObjectTile()
{
Prefab = this;
}
public GameObjectTile CreateInstance(Vector3Int chunkPos, Vector3Int offset, Vector3Int pos)
{
if (!Prefab)
Prefab = this;
var tile = GameObjectPool.Get<GameObjectTile>(gameObject);
tile.Prefab = Prefab;
tile.SetPosInternal(chunkPos, offset, pos);
return tile;
}
public void DestroyInstance()
{
if (Application.isPlaying)
{
GameObjectPool.Release(Prefab.gameObject, gameObject);
}
else
Undo.DestroyObjectImmediate(gameObject);
}
internal void SetPosInternal(Vector3Int chunkPos, Vector3Int offset, Vector3Int pos)
{
Chunk = chunkPos;
ChunkOffset = offset;
Position = pos;
}
private void Update()
{
}
}
}
<|start_filename|>Assets/Scripts/WFC/Chunk2D.cs<|end_filename|>
using UnityEngine;
namespace WFC
{
public struct Chunk2D<T>
{
public T Data;
public Vector2Int Position;
public int Orientation;
}
}
<|start_filename|>Assets/Scripts/WFC/IPattern.cs<|end_filename|>
using System.Collections.Generic;
namespace WFC
{
public interface IPattern<TAdjacent> where TAdjacent : IPattern<TAdjacent>
{
float Weight { get; }
IEnumerable<TAdjacent> GetAdjacent(int i);
}
}
<|start_filename|>Assets/Scripts/Editor/src/WrapObjectTile.cs<|end_filename|>
using SardineFish.Utils;
using UnityEditor;
using UnityEngine;
using WFC.Tilemap3D;
namespace WFC.Editor
{
public class WrapObjectTile
{
[MenuItem("Utils/WrapSelectedObjectToTile")]
private static void WrapToTile()
{
var objs = Selection.gameObjects;
if (objs.Length > 0)
{
Undo.RecordObjects(objs, "Wrap to GameObjectTile");
}
foreach (var obj in Selection.gameObjects)
{
var tile = new GameObject($"Tile-{obj.name}");
tile.AddComponent<GameObjectTile>();
tile.transform.position = obj.transform.position.FloorToVector3Int();
tile.transform.SetParent(obj.transform.parent);
obj.transform.SetParent(tile.transform);
}
}
}
}
<|start_filename|>Assets/Scripts/Utils/GameObjectPool.cs<|end_filename|>
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Assertions;
namespace SardineFish.Utils
{
public class GameObjectPool : MonoBehaviour
{
private Dictionary<GameObject, Pool> prefabPools = new Dictionary<GameObject, Pool>();
private Dictionary<Type, Pool> _perComponentPools = new Dictionary<Type, Pool>();
private static GameObjectPool _instance;
private static GameObjectPool Instance
{
get
{
if (!_instance)
_instance = CreateGameObjectPoolRoot();
return _instance;
}
}
#region Types
private static class PerComponentPool<T> where T : Component
{
public static Pool ObjectPool;
private static string DefaultName = "[GameObject]";
public static T Get()
=> Get(DefaultName);
public static T Get(string name)
{
if (Application.isPlaying)
return GetOrCreatePool().Get(name).GetComponent<T>();
else
return Allocator().GetComponent<T>();
}
public static void Release(T component)
{
if (Application.isPlaying)
GetOrCreatePool().Release(component.gameObject);
else
DestroyImmediate(component.gameObject);
}
public static void PreAlloc(int count)
{
if (Application.isPlaying)
GetOrCreatePool().PreAlloc(count);
}
static Pool GetOrCreatePool()
{
if(ObjectPool is null)
CreatePool();
return ObjectPool;
}
static void CreatePool()
{
var container = new GameObject("[Pool]" + typeof(T).Name);
container.transform.SetParent(GameObjectPool.Instance.transform, false);
ObjectPool = new Pool(container, Allocator);
Instance._perComponentPools.Add(typeof(T), ObjectPool);
}
static GameObject Allocator()
{
var obj = new GameObject(DefaultName);
obj.AddComponent<T>();
return obj;
}
}
private class Pool
{
public string defaultObjectName = "[GameObject]";
public Func<GameObject> Allocator;
public GameObject ObjectCollection;
Stack<GameObject> objectPool = new Stack<GameObject>();
public Pool(GameObject objectCollection, Func<GameObject> allocator)
{
ObjectCollection = objectCollection;
Allocator = allocator;
}
GameObject CreateObject()
{
var newObj = Allocator.Invoke();
newObj.name = defaultObjectName;
return newObj;
}
public GameObject Get()
{
if (objectPool.Count > 0)
{
var obj = objectPool.Pop();
obj.SetActive(true);
// obj.transform.parent = null;
obj.transform.SetParent(null);
return obj;
}
return CreateObject();
}
public GameObject Get(string name)
{
var obj = Get();
obj.name = name;
return obj;
}
public void Release(GameObject obj)
{
if (!obj)
return;
// obj.transform.parent = transform;
obj.transform.SetParent(ObjectCollection.transform, false);
obj.SetActive(false);
objectPool.Push(obj);
}
public void PreAlloc(int count)
{
for (var i = 0; i < count; i++)
{
var obj = CreateObject();
Release(obj);
}
}
}
#endregion
#region PrefabPool
public static GameObject Get(GameObject prefab)
{
if (Application.isPlaying)
return GetOrCreatePrefabPool(prefab).Get();
else
return Instantiate(prefab);
}
public static GameObject Get(GameObject prefab, string name)
{
if(Application.isPlaying)
return GetOrCreatePrefabPool(prefab).Get(name);
else
{
var obj = Instantiate(prefab);
obj.name = name;
return obj;
}
}
public static void Release(GameObject prefab, GameObject obj)
{
if (Application.isPlaying)
GetOrCreatePrefabPool(prefab).Release(obj);
else
DestroyImmediate(obj);
}
public static T Get<T>(GameObject prefab) where T : Component
=> Get(prefab)?.GetComponent<T>();
public static T Get<T>(GameObject prefab, string name) where T : Component
=> Get(prefab, name).GetComponent<T>();
public static void Release<T>(GameObject prefab, T component) where T : Component
{
if(component && component.gameObject)
Release(prefab, component.gameObject);
}
public static void PreAlloc(GameObject prefab, int count)
{
if (Application.isPlaying)
GetOrCreatePrefabPool(prefab).PreAlloc(count);
}
static Pool GetOrCreatePrefabPool(GameObject prefab)
{
Assert.IsTrue(Application.isPlaying);
if (Instance.prefabPools.TryGetValue(prefab, out var existingPool) && !(existingPool is null))
return existingPool;
var pool = CreatePrefabPool(prefab);
Instance.prefabPools[prefab] = pool;
return pool;
}
static Pool CreatePrefabPool(GameObject prefab)
{
var pool = new Pool(new GameObject(), () => Instantiate(prefab));
pool.ObjectCollection.transform.parent = Instance.transform;
pool.defaultObjectName = prefab.name;
pool.ObjectCollection.name = "[Pool]" + prefab.name;
return pool;
}
#endregion
#region PerComponentPool
public static T Get<T>(string name) where T : Component
=> PerComponentPool<T>.Get(name);
public static T Get<T>() where T : Component
=> PerComponentPool<T>.Get();
public static void Release<T>(T component) where T : Component
=> PerComponentPool<T>.Release(component);
public static void PreAlloc<T>(int count) where T : Component
=> PerComponentPool<T>.PreAlloc(count);
#endregion
// private void OnDestroy()
// {
// _instance = null;
// }
static GameObjectPool CreateGameObjectPoolRoot()
{
Assert.IsTrue(Application.isPlaying);
var obj = new GameObject();
obj.name = "[GameObjectPool]";
var pool = obj.AddComponent<GameObjectPool>();
DontDestroyOnLoad(obj);
return pool;
}
}
}
<|start_filename|>Assets/Scripts/WFC/Pattern.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace WFC
{
public enum Orientation2D : int
{
Right = 0,
Up = 1,
Left = 2,
Down = 3,
}
public class Pattern<T> : IPattern<Pattern<T>>
{
public T Chunk { get; }
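// Neighbors[i] is the set of patterns allowed adjacent to this pattern in
// direction i, where i indexes the NeighborOffset array of the pattern source
// (e.g. Tilemap3DPattern.NeighborOffset).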
public readonly HashSet<Pattern<T>>[] Neighbors;
// = new[]
// {
// new HashSet<Pattern<T>>(),
// new HashSet<Pattern<T>>(),
// new HashSet<Pattern<T>>(),
// new HashSet<Pattern<T>>(),
// };
// public HashSet<Pattern<T>> Up => Neighbors[(int) Orientation2D.Up];
//
// public HashSet<Pattern<T>> Left => Neighbors[(int) Orientation2D.Left];
//
// public HashSet<Pattern<T>> Right => Neighbors[(int) Orientation2D.Right];
//
// public HashSet<Pattern<T>> Down => Neighbors[(int) Orientation2D.Down];
public Pattern(T chunkData, int adjacentCount, float weight = 1)
{
Chunk = chunkData;
Neighbors = new HashSet<Pattern<T>>[adjacentCount];
Weight = weight;
for (var i = 0; i < adjacentCount; i++)
{
Neighbors[i] = new HashSet<Pattern<T>>();
}
}
public float Weight { get; }
public IEnumerable<Pattern<T>> GetAdjacent(int i) => Neighbors[i];
}
}
<|start_filename|>Assets/Scripts/WFC/WFCTilemap3DGenerator.cs<|end_filename|>
using System;
using System.Collections;
using SardineFish.Utils;
using UnityEngine;
using WFC.Tilemap3D;
namespace WFC
{
[RequireComponent(typeof(GameObjectTilemap))]
public class WFCTilemap3DGenerator : MonoBehaviour, ICustomEditorEX
{
public int Seed;
public BoundsInt Bounds;
public Tilemap3DPattern PatternGenerator;
private CoroutineRunner _coroutineRunner;
private WFCGenerator<GameObjectTile> _generator;
private GameObjectTilemap _tilemap;
private void Awake()
{
_tilemap = GetComponent<GameObjectTilemap>();
}
[EditorButton]
public void Random()
{
Seed = new System.Random().Next();
}
[EditorButton]
public void Generate()
{
if(!PatternGenerator)
return;
_tilemap.ClearAllTiles();
PatternGenerator.ExtractPatterns();
_generator = new WFCGenerator<GameObjectTile>(Bounds.size, PatternGenerator.NeighborOffset, PatternGenerator.Patterns);
StartCoroutine(RunProgressive());
}
IEnumerator RunProgressive()
{
_generator.Reset(Seed);
if (PatternGenerator.BoundaryPattern is Pattern<GameObjectTile> boundPattern)
{
_generator.SetBoundaryPattern(boundPattern);
}
foreach (var pos in _generator.RunProgressive())
{
var tile = _generator.ChunkStates[pos.x, pos.y, pos.z].Pattern.Chunk;
_tilemap.SetTile(pos + Bounds.min, tile);
yield return null;
}
}
private void OnDrawGizmos()
{
Gizmos.color = Color.cyan;
Gizmos.DrawWireCube(Bounds.center, Bounds.size);
}
}
}
<|start_filename|>Assets/Scripts/WFC/WFCTilemapGenerator.cs<|end_filename|>
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using SardineFish.Utils;
using UnityEngine;
using UnityEngine.Tilemaps;
namespace WFC
{
[RequireComponent(typeof(Tilemap))]
public class WFCTilemapGenerator : MonoBehaviour, ICustomEditorEX
{
public int Seed;
public TilemapPattern TilemapPattern;
public BoundsInt Bounds;
public bool ShowSuperposition = false;
private Tilemap _tilemap;
private WFCGenerator<TileBase> _generator;
private List<Tilemap> _stateMaps = new List<Tilemap>();
private CoroutineRunner CoroutineRunner;
private void Awake()
{
_tilemap = GetComponent<Tilemap>();
}
[EditorButton]
public void Generate()
{
if (!TilemapPattern)
return;
// if(_generator is null)
_tilemap.ClearAllTiles();
TilemapPattern.ExtractPatterns();
_generator = new WFCGenerator<TileBase>(Bounds.size.ToVector2Int().ToVector3Int(1), TilemapPattern.NeighborOffset, TilemapPattern.Patterns);
foreach(var tilemap in _stateMaps)
tilemap.ClearAllTiles();
RandomSeed();
StartCoroutine(GenerateProgressive());
// CoroutineRunner = new CoroutineRunner(GenerateProgressive());
}
[EditorButton]
public void Step()
{
if(CoroutineRunner is null)
return;
CoroutineRunner.Tick();
}
IEnumerator GenerateProgressive()
{
_generator.Reset(Seed);
yield return null;
foreach (var collapsedChunk in _generator.RunProgressive())
{
var pos = Bounds.min + collapsedChunk;
var tile = _generator.ChunkStates[collapsedChunk.x, collapsedChunk.y, collapsedChunk.z].Pattern.Chunk;
_tilemap.SetTile(pos, tile);
if(ShowSuperposition)
DrawSuperposition();
yield return null;
}
}
void DrawSuperposition()
{
if (_stateMaps.Count < _generator.Patterns.Count)
{
for (var i = _stateMaps.Count; i < _generator.Patterns.Count; i++)
{
var obj = new GameObject();
obj.transform.parent = transform;
obj.transform.position = transform.position + Vector3.forward * (i + 1);
var tilemap = obj.AddComponent<Tilemap>();
var renderer = obj.AddComponent<TilemapRenderer>();
tilemap.color = Color.white.WithAlpha(0.7f);
_stateMaps.Add(tilemap);
}
}
for (var x = 0; x < _generator.Size.x; x++)
for (var y = 0; y < _generator.Size.y; y++)
{
var p = Bounds.min + new Vector3Int(x, y, 0);
var idx = 0;
foreach (var pattern in _generator.ChunkStates[x, y, 0].Compatibles)
{
_stateMaps[idx++].SetTile(p, pattern.Chunk);
}
}
}
[EditorButton()]
public void RandomSeed()
{
Seed = new System.Random().Next();
}
private void OnDrawGizmos()
{
Gizmos.color = Color.cyan;
Gizmos.DrawWireCube(transform.position + Bounds.center, Bounds.size);
}
}
} | SardineFish/WFC-Demo |
<|start_filename|>project.clj<|end_filename|>
(defproject evosec/firebird-driver "1.3.0"
:min-lein-version "2.5.0"
:dependencies
[[org.firebirdsql.jdbc/jaybird "4.0.2.java8"]]
:profiles
{:provided
{:dependencies [[metabase-core "1.0.0-SNAPSHOT"]]}
:uberjar
{:auto-clean true
:aot :all
:javac-options ["-target" "1.8", "-source" "1.8"]
:target-path "target/%s"
:uberjar-name "firebird.metabase-driver.jar"}})
<|start_filename|>test/metabase/test/data/firebird.clj<|end_filename|>
(ns metabase.test.data.firebird
"Code for creating / destroying a FirebirdSQL database from a `DatabaseDefinition`."
(:require [clojure.string :as str]
[clojure.java.jdbc :as jdbc]
[metabase.driver.sql-jdbc.connection :as sql-jdbc.conn]
[metabase.test.data
[interface :as tx]
[sql :as sql.tx]
[sql-jdbc :as sql-jdbc.tx]]
[metabase.test.data.sql-jdbc
[load-data :as load-data]
[execute :as execute]]
[metabase.util :as u]))
(sql-jdbc.tx/add-test-extensions! :firebird)
(defmethod tx/dbdef->connection-details :firebird [_ context {:keys [database-name]}]
{:host (tx/db-test-env-var-or-throw :firebird :host "localhost")
:port (tx/db-test-env-var-or-throw :firebird :port 3050)
:user (tx/db-test-env-var-or-throw :firebird :user "SYSDBA")
:password (tx/db-test-env-var-or-throw :firebird :password "<PASSWORD>")
:db (tx/db-test-env-var-or-throw :firebird :db "metabase-testing")
:additional-options (tx/db-test-env-var-or-throw :firebird :additional-options "charSet=UTF-8")})
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/BigInteger] [_ _] "BIGINT")
;; BOOLEAN was added in Firebird 3; SMALLINT could be used instead for Firebird 2 compatibility
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/Boolean] [_ _] "BOOLEAN")
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/Date] [_ _] "DATE")
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/DateTime] [_ _] "TIMESTAMP")
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/Decimal] [_ _] "DECIMAL")
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/Float] [_ _] "FLOAT")
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/Integer] [_ _] "INTEGER")
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/Text] [_ _] "VARCHAR(255)")
(defmethod sql.tx/field-base-type->sql-type [:firebird :type/Time] [_ _] "TIME")
(defmethod sql.tx/pk-sql-type :firebird [_] "INTEGER GENERATED BY DEFAULT AS IDENTITY")
(defmethod sql.tx/pk-field-name :firebird [_] "id")
;; Use RECREATE TABLE to drop a table if it exists, in case some tables have not been dropped before
;; running tests
(defmethod sql.tx/create-table-sql :firebird
[driver {:keys [database-name], :as dbdef} {:keys [table-name field-definitions table-comment]}]
(let [pk-field-name (sql.tx/pk-field-name driver)]
(format "RECREATE TABLE %s (\"%s\" %s PRIMARY KEY, %s)"
(sql.tx/qualify+quote-name driver database-name table-name)
pk-field-name
(sql.tx/pk-sql-type driver)
(->> field-definitions
(map (fn [{:keys [field-name base-type field-comment]}]
(format "\"%s\" %s"
field-name
(if (map? base-type)
(:native base-type)
(sql.tx/field-base-type->sql-type driver base-type)))))
(interpose ", ")
(apply str)))))
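;; As a rough sketch (with a hypothetical dbdef), the generated DDL is shaped like:
;; RECREATE TABLE "mydb_users" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, "name" VARCHAR(255))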
(defmethod sql.tx/drop-table-if-exists-sql :firebird [& _] nil)
(defmethod load-data/load-data! :firebird [& args]
(apply load-data/load-data-one-at-a-time! args))
(defmethod execute/execute-sql! :firebird [& args]
(apply execute/sequentially-execute-sql! args))
;; Firebird cannot create or drop databases at runtime
(defmethod sql.tx/create-db-sql :firebird [& _] nil)
(defmethod sql.tx/drop-db-if-exists-sql :firebird [& _] nil)
(defmethod sql.tx/qualified-name-components :firebird
([_ db-name] [db-name])
([_ db-name table-name] [(tx/db-qualified-table-name db-name table-name)])
([_ db-name table-name field-name] [(tx/db-qualified-table-name db-name table-name) field-name]))
;; Drop all tables that are not system tables before running tests
(defmethod tx/before-run :firebird [_]
(let [connection-spec (sql-jdbc.conn/connection-details->spec :firebird
(tx/dbdef->connection-details :firebird :server nil))
foreign-keys (jdbc/query
connection-spec
(str "select r.rdb$relation_name, r.rdb$constraint_name from rdb$relation_constraints r where (r.rdb$constraint_type='FOREIGN KEY')"))
leftover-tables (map :rdb$relation_name (jdbc/query
connection-spec
(str "SELECT RDB$RELATION_NAME "
"FROM RDB$RELATIONS "
"WHERE RDB$VIEW_BLR IS NULL "
"AND (RDB$SYSTEM_FLAG IS NULL OR RDB$SYSTEM_FLAG = 0);")))]
;; First, kill all connections and statements that are still running
(println "Killing all open connections to test db... ")
(jdbc/execute! connection-spec ["DELETE FROM MON$ATTACHMENTS WHERE MON$ATTACHMENT_ID <> CURRENT_CONNECTION"])
(println "[ok]")
(println "Killing all running statements in test db... ")
(jdbc/execute! connection-spec ["DELETE FROM MON$STATEMENTS WHERE MON$ATTACHMENT_ID <> CURRENT_CONNECTION"])
(println "[ok]")
;; Second, remove all foreign keys, so tables can be properly dropped
(doseq [constraint foreign-keys]
(u/ignore-exceptions
(printf "Dropping constraint \"%s\" on table \"%s\"...\n"
(:rdb$constraint_name constraint)
(:rdb$relation_name constraint))
(jdbc/execute! connection-spec [(format "ALTER TABLE \"%s\" DROP CONSTRAINT \"%s\";"
(:rdb$relation_name constraint)
(:rdb$constraint_name constraint))])
(println "[ok]")))
;; Third, drop all leftover tables
(doseq [table leftover-tables]
(u/ignore-exceptions
(printf "Dropping leftover Firebird table \"%s\"...\n" (str/trimr table))
(jdbc/execute! connection-spec [(format "DROP TABLE \"%s\";" (str/trimr table))])
(println "[ok]")))
(println "Destroyed all leftover tables.")))
| kumajaya/metabase-firebird-driver |
<|start_filename|>library/src/main/java/com/flipkart/chatheads/ui/ChatHeadListener.java<|end_filename|>
package com.flipkart.chatheads.ui;
import java.io.Serializable;
/**
* Created by kiran.kumar on 06/05/15.
*/
public interface ChatHeadListener<T> {
void onChatHeadAdded(T key);
void onChatHeadRemoved(T key, boolean userTriggered);
void onChatHeadArrangementChanged(ChatHeadArrangement oldArrangement, ChatHeadArrangement newArrangement);
<S extends Serializable> void onChatHeadAnimateEnd(ChatHead<S> chatHead);
<S extends Serializable> void onChatHeadAnimateStart(ChatHead<S> chatHead);
}
<|start_filename|>library/src/main/java/com/flipkart/chatheads/ui/ChatHeadArrangement.java<|end_filename|>
package com.flipkart.chatheads.ui;
import android.os.Bundle;
import android.view.MotionEvent;
import com.facebook.rebound.Spring;
/**
* Created by kirankumar on 13/02/15.
*/
public abstract class ChatHeadArrangement {
public abstract void setContainer(ChatHeadManager container);
public abstract void onActivate(ChatHeadManager container, Bundle extras, int maxWidth, int maxHeight, boolean animated);
public abstract void onDeactivate(int maxWidth, int maxHeight);
public abstract void onSpringUpdate(ChatHead activeChatHead, boolean isDragging, int maxWidth, int maxHeight, Spring spring, Spring activeHorizontalSpring, Spring activeVerticalSpring, int totalVelocity);
public boolean handleRawTouchEvent(MotionEvent event) {
return false;
}
public abstract boolean handleTouchUp(ChatHead activeChatHead, int xVelocity, int yVelocity, Spring activeHorizontalSpring, Spring activeVerticalSpring, boolean wasDragging);
public abstract void onChatHeadAdded(ChatHead chatHead, boolean animated);
public abstract void onChatHeadRemoved(ChatHead removed);
public abstract void onCapture(ChatHeadManager container, ChatHead activeChatHead);
public abstract void selectChatHead(ChatHead chatHead);
public abstract void bringToFront(ChatHead chatHead);
public abstract void onReloadFragment(ChatHead chatHead);
public abstract boolean shouldShowCloseButton(ChatHead chatHead);
public abstract Integer getHeroIndex();
public abstract void onConfigChanged(ChatHeadConfig newConfig);
public abstract Bundle getRetainBundle();
public abstract boolean canDrag(ChatHead chatHead);
public abstract void removeOldestChatHead();
}
<|start_filename|>demo/src/main/java/com/flipkart/springyheads/demo/TestFragment.java<|end_filename|>
package com.flipkart.springyheads.demo;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.flipkart.springyheads.demo.R;
public class TestFragment extends Fragment {
public static TestFragment newInstance(int identifier) {
TestFragment testFragment = new TestFragment();
Bundle bundle = new Bundle();
bundle.putInt("id", identifier);
testFragment.setArguments(bundle);
return testFragment;
}
public TestFragment() {
// Required empty public constructor
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
View inflate = inflater.inflate(R.layout.fragment_test, container, false);
TextView identifier = (TextView) inflate.findViewById(R.id.identifier);
identifier.setText(String.valueOf(getArguments().getInt("id")));
return inflate;
}
}
<|start_filename|>library/src/main/java/com/flipkart/chatheads/ui/UpArrowLayout.java<|end_filename|>
package com.flipkart.chatheads.ui;
import android.content.Context;
import android.graphics.Point;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import com.flipkart.chatheads.R;
import java.util.ArrayList;
/**
* Created by kirankumar on 17/02/15.
*/
public class UpArrowLayout extends ViewGroup {
private final Point pointTo = new Point(0, 0);
private final ArrayList<View> mMatchParentChildren = new ArrayList<View>(1);
private ImageView arrowView;
private int arrowDrawable = R.drawable.chat_top_arrow;
public UpArrowLayout(Context context) {
super(context);
init();
}
public UpArrowLayout(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public UpArrowLayout(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}
public int getArrowDrawable() {
return arrowDrawable;
}
public void setArrowDrawable(int arrowDrawable) {
this.arrowDrawable = arrowDrawable;
init();
}
private void init() {
if (arrowView != null) {
removeView(arrowView);
}
arrowView = createArrowView();
addView(arrowView);
}
protected ImageView createArrowView() {
Drawable drawable = getResources().getDrawable(arrowDrawable);
ImageView imageView = new ImageView(getContext());
imageView.setImageDrawable(drawable);
return imageView;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int count = getChildCount();
int measureSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
arrowView.measure(measureSpec, measureSpec);
int arrowViewMeasuredHeight = arrowView.getMeasuredHeight();
int size = MeasureSpec.getSize(heightMeasureSpec);
if (size > arrowViewMeasuredHeight) {
size -= arrowViewMeasuredHeight + pointTo.y;
heightMeasureSpec = MeasureSpec.makeMeasureSpec(size, MeasureSpec.getMode(heightMeasureSpec));
}
final boolean measureMatchParentChildren =
MeasureSpec.getMode(widthMeasureSpec) != MeasureSpec.EXACTLY ||
MeasureSpec.getMode(heightMeasureSpec) != MeasureSpec.EXACTLY;
int maxHeight = 0;
int maxWidth = 0;
int childState = 0;
for (int i = 0; i < count; i++) {
final View child = getChildAt(i);
if (child == arrowView) continue;
if (child.getVisibility() != GONE) {
measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, 0);
final LayoutParams lp = (LayoutParams) child.getLayoutParams();
maxWidth = Math.max(maxWidth,
child.getMeasuredWidth());
maxHeight = Math.max(maxHeight,
child.getMeasuredHeight());
childState = combineMeasuredStates(childState, child.getMeasuredState());
if (measureMatchParentChildren) {
if (lp.width == LayoutParams.MATCH_PARENT ||
lp.height == LayoutParams.MATCH_PARENT) {
mMatchParentChildren.add(child);
}
}
}
}
// Check against our minimum height and width
maxHeight = Math.max(maxHeight, getSuggestedMinimumHeight());
maxWidth = Math.max(maxWidth, getSuggestedMinimumWidth());
setMeasuredDimension(resolveSizeAndState(maxWidth, widthMeasureSpec, childState),
resolveSizeAndState(maxHeight, heightMeasureSpec,
childState << MEASURED_HEIGHT_STATE_SHIFT));
count = mMatchParentChildren.size();
if (count > 1) {
for (int i = 0; i < count; i++) {
final View child = mMatchParentChildren.get(i);
final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
int childWidthMeasureSpec;
int childHeightMeasureSpec;
if (lp.width == LayoutParams.MATCH_PARENT) {
childWidthMeasureSpec = MeasureSpec.makeMeasureSpec(getMeasuredWidth() -
lp.leftMargin - lp.rightMargin,
MeasureSpec.EXACTLY);
} else {
childWidthMeasureSpec = getChildMeasureSpec(widthMeasureSpec,
lp.leftMargin + lp.rightMargin,
lp.width);
}
if (lp.height == LayoutParams.MATCH_PARENT) {
childHeightMeasureSpec = MeasureSpec.makeMeasureSpec(getMeasuredHeight() -
lp.topMargin - lp.bottomMargin,
MeasureSpec.EXACTLY);
} else {
childHeightMeasureSpec = getChildMeasureSpec(heightMeasureSpec,
lp.topMargin + lp.bottomMargin,
lp.height);
}
child.measure(childWidthMeasureSpec, childHeightMeasureSpec);
}
}
setMeasuredDimension(getMeasuredWidth(), getMeasuredHeight() + arrowViewMeasuredHeight + pointTo.y);
updatePointer();
}
protected void measureChildWithMargins(View child,
int parentWidthMeasureSpec, int widthUsed,
int parentHeightMeasureSpec, int heightUsed) {
final LayoutParams lp = child.getLayoutParams();
final int childWidthMeasureSpec = getChildMeasureSpec(parentWidthMeasureSpec, widthUsed, lp.width);
final int childHeightMeasureSpec = getChildMeasureSpec(parentHeightMeasureSpec,
heightUsed, lp.height);
child.measure(childWidthMeasureSpec, childHeightMeasureSpec);
}
public void pointTo(final int viewX, final int viewY) {
pointTo.x = viewX;
pointTo.y = viewY;
if (getMeasuredHeight() != 0 && getMeasuredWidth() != 0) {
updatePointer();
}
invalidate();
}
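// Centers the arrow horizontally on the anchor x and offsets it vertically by the
// anchor y, using translation so the arrow can move without triggering a relayout.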
private void updatePointer() {
int x = (int) (pointTo.x - arrowView.getMeasuredWidth() / 2);
int y = pointTo.y;
if (x != arrowView.getTranslationX()) {
arrowView.setTranslationX(x);
}
if (y != arrowView.getTranslationY()) {
arrowView.setTranslationY(y);
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
arrowView.layout(left, top, left + arrowView.getMeasuredWidth(), top + arrowView.getMeasuredHeight());
for (int i = 0; i < getChildCount(); i++) {
View child = getChildAt(i);
if (child == arrowView) continue;
child.layout(left, top + arrowView.getMeasuredHeight() + pointTo.y, right, bottom);
}
}
@Override
protected LayoutParams generateDefaultLayoutParams() {
return new FrameLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
}
@Override
public LayoutParams generateLayoutParams(AttributeSet attrs) {
return new FrameLayout.LayoutParams(getContext(), attrs);
}
@Override
public void removeAllViews() {
super.removeAllViews();
addView(arrowView);
}
}
<|start_filename|>library/src/main/java/com/flipkart/chatheads/ui/ChatHeadContainer.java<|end_filename|>
package com.flipkart.chatheads.ui;
import android.content.res.Configuration;
import android.util.DisplayMetrics;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewManager;
import android.view.WindowManager;
import com.facebook.rebound.SimpleSpringListener;
import com.facebook.rebound.Spring;
import java.io.Serializable;
/**
* Created by kiran.kumar on 27/10/16.
*/
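/*
* Abstraction over the surface that hosts the chat heads (for example an
* in-activity view hierarchy or a system overlay window), letting arrangements
* add, move and query views without knowing how the host positions them.
*/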
public interface ChatHeadContainer {
void onInitialized(ChatHeadManager manager);
DisplayMetrics getDisplayMetrics();
ViewGroup.LayoutParams createLayoutParams(int height, int width, int gravity, int bottomMargin);
void setViewX(View view, int xPosition);
void setViewY(View view, int yPosition);
int getViewX(View view);
int getViewY(View view);
void bringToFront(View view);
void addView(View view, ViewGroup.LayoutParams layoutParams);
void removeView(View view);
void onArrangementChanged(ChatHeadArrangement oldArrangement, ChatHeadArrangement newArrangement);
void requestLayout();
}
<|start_filename|>library/src/main/java/com/flipkart/chatheads/ui/ChatHeadConfig.java<|end_filename|>
package com.flipkart.chatheads.ui;
import android.graphics.Point;
/**
* Created by kiran.kumar on 06/05/15.
*/
public class ChatHeadConfig {
private int headHeight;
private int headWidth;
private int headHorizontalSpacing;
private int headVerticalSpacing;
private Point initialPosition;
private int maxChatHeads;
private int closeButtonWidth;
private int closeButtonHeight;
private int closeButtonBottomMargin;
private int circularRingWidth;
private int circularRingHeight;
private int circularFanOutRadius;
private boolean closeButtonHidden;
public boolean isCloseButtonHidden() {
return closeButtonHidden;
}
public void setCloseButtonHidden(boolean closeButtonHidden) {
this.closeButtonHidden = closeButtonHidden;
}
public void setCircularFanOutRadius(int circularFanOutRadius) {
this.circularFanOutRadius = circularFanOutRadius;
}
public int getMaxChatHeads() {
return maxChatHeads;
}
public void setMaxChatHeads(int maxChatHeads) {
this.maxChatHeads = maxChatHeads;
}
public int getHeadHeight() {
return headHeight;
}
public void setHeadHeight(int headHeight) {
this.headHeight = headHeight;
}
public int getHeadWidth() {
return headWidth;
}
public void setHeadWidth(int headWidth) {
this.headWidth = headWidth;
}
public int getHeadHorizontalSpacing(int maxWidth, int maxHeight) {
return headHorizontalSpacing;
}
public void setHeadHorizontalSpacing(int headHorizontalSpacing) {
this.headHorizontalSpacing = headHorizontalSpacing;
}
public int getHeadVerticalSpacing(int maxWidth, int maxHeight) {
return headVerticalSpacing;
}
public void setHeadVerticalSpacing(int headVerticalSpacing) {
this.headVerticalSpacing = headVerticalSpacing;
}
public Point getInitialPosition() {
return initialPosition;
}
public void setInitialPosition(Point initialPosition) {
this.initialPosition = initialPosition;
}
public int getMaxChatHeads(int maxWidth, int maxHeight) {
return maxChatHeads;
}
public int getCloseButtonWidth() {
return closeButtonWidth;
}
public void setCloseButtonWidth(int closeButtonWidth) {
this.closeButtonWidth = closeButtonWidth;
}
public int getCloseButtonHeight() {
return closeButtonHeight;
}
public void setCloseButtonHeight(int closeButtonHeight) {
this.closeButtonHeight = closeButtonHeight;
}
public int getCloseButtonBottomMargin() {
return closeButtonBottomMargin;
}
public void setCloseButtonBottomMargin(int closeButtonBottomMargin) {
this.closeButtonBottomMargin = closeButtonBottomMargin;
}
public int getCircularRingWidth() {
return circularRingWidth;
}
public void setCircularRingWidth(int circularRingWidth) {
this.circularRingWidth = circularRingWidth;
}
public int getCircularRingHeight() {
return circularRingHeight;
}
public void setCircularRingHeight(int circularRingHeight) {
this.circularRingHeight = circularRingHeight;
}
public int getCircularFanOutRadius(int maxWidth, int maxHeight) {
return circularFanOutRadius;
}
}
<|start_filename|>demo/src/main/java/com/flipkart/springyheads/demo/FloatingActivity.java<|end_filename|>
package com.flipkart.springyheads.demo;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.IBinder;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
/**
* Created by kiran.kumar on 06/02/16.
*/
public class FloatingActivity extends Activity implements View.OnClickListener {
private Button addButton;
private Button removeButton;
private Button removeAllButtons;
private Button toggleButton;
private Button updateBadgeCount;
private ChatHeadService chatHeadService;
private boolean bound;
/**
* Defines callbacks for service binding, passed to bindService()
*/
private ServiceConnection mConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName className,
IBinder service) {
// We've bound to LocalService, cast the IBinder and get LocalService instance
ChatHeadService.LocalBinder binder = (ChatHeadService.LocalBinder) service;
chatHeadService = binder.getService();
bound = true;
chatHeadService.minimize();
}
@Override
public void onServiceDisconnected(ComponentName arg0) {
bound = false;
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Intent intent = new Intent(this, ChatHeadService.class);
startService(intent);
bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
setupButtons();
}
private void setupButtons() {
setContentView(R.layout.activity_main);
addButton = (Button) findViewById(R.id.add_head);
removeButton = (Button) findViewById(R.id.remove_head);
removeAllButtons = (Button) findViewById(R.id.remove_all_heads);
toggleButton = (Button) findViewById(R.id.toggle_arrangement);
updateBadgeCount = (Button) findViewById(R.id.update_badge_count);
addButton.setOnClickListener(this);
removeButton.setOnClickListener(this);
removeAllButtons.setOnClickListener(this);
toggleButton.setOnClickListener(this);
updateBadgeCount.setOnClickListener(this);
}
@Override
public void onClick(View v) {
if (bound) {
if (v == addButton) {
chatHeadService.addChatHead();
} else if (v == removeButton) {
chatHeadService.removeChatHead();
} else if (v == removeAllButtons) {
chatHeadService.removeAllChatHeads();
} else if (v == toggleButton) {
chatHeadService.toggleArrangement();
} else if (v == updateBadgeCount) {
chatHeadService.updateBadgeCount();
}
} else {
Toast.makeText(this, "Service not bound", Toast.LENGTH_SHORT).show();
}
}
}
| dantepro1955/springy-heads |
<|start_filename|>recipes-core/essos-stub/files/essos-app.h<|end_filename|>
/*
* If not stated otherwise in this file or this component's Licenses.txt file the
* following copyright and licenses apply:
*
* Copyright 2017 RDK Management
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __ESSOS_APP__
#define __ESSOS_APP__
/* ----------------------------------------------------------------------------
* Essos apps are single-windowed, portable applications that render graphics
* using OpenGLES2.
*
* The API flow for a minimal app is:
* EssContextCreate()
* EssContextInit()
* EssContextSetKeyListener()
* EssContextSetPointerListener()
* EssContextSetSettingsListener()
* EssContextSetTerminateListener()
* doEGLSetup() including:
* EssContextGetEGLDisplayType()
* EssContextCreateNativeWindow()
* EssContextStart()
*
* then execute a main loop that includes
*
* EssContextRunEventLoopOnce()
* doOpenGLES2Rendering()
* EssContextUpdateDisplay();
*---------------------------------------------------------------------------- */
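/*
* An illustrative sketch of that flow (hypothetical application code, not part
* of this header; listener registration, EGL surface/context creation and
* error handling are elided):
*
*   EssCtx *ctx = EssContextCreate();
*   if ( EssContextInit( ctx ) )
*   {
*      NativeDisplayType displayType;
*      NativeWindowType nativeWindow;
*      EssContextGetEGLDisplayType( ctx, &displayType );
*      EssContextCreateNativeWindow( ctx, 1280, 720, &nativeWindow );
*      ...create the EGL display, surface and context here...
*      if ( EssContextStart( ctx ) )
*      {
*         while ( appRunning )
*         {
*            EssContextRunEventLoopOnce( ctx );
*            ...OpenGLES2 rendering...
*            EssContextUpdateDisplay( ctx );
*         }
*      }
*   }
*   EssContextDestroy( ctx );
*/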
#include <EGL/egl.h>
#include <EGL/eglext.h>
#ifndef __cplusplus
#include <stdbool.h>
#endif
#if defined(__cplusplus)
extern "C" {
#endif
typedef struct _EssCtx EssCtx;
typedef struct _EssKeyListener
{
void (*keyPressed)( void *userData, unsigned int key );
void (*keyReleased)( void *userData, unsigned int key );
void (*keyRepeat)( void *userData, unsigned int key );
} EssKeyListener;
typedef struct _EssPointerListener
{
void (*pointerMotion)( void *userData, int x, int y );
void (*pointerButtonPressed)( void *userData, int button, int x, int y );
void (*pointerButtonReleased)( void *userData, int button, int x, int y );
} EssPointerListener;
typedef struct _EssTouchListener
{
void (*touchDown)( void *userData, int id, int x, int y );
void (*touchUp)( void *userData, int id );
void (*touchMotion)( void *userData, int id, int x, int y );
void (*touchFrame)( void *userData );
} EssTouchListener;
typedef struct _EssSettingsListener
{
void (*displaySize)( void *userData, int width, int height );
/*
* If content rendered to the edges of the display is not guaranteed to be
* visible, this callback provides the region of the application display
* surface that is guaranteed to be visible. An application can optionally
* use this information to, for example, draw a background image that
* extends to the display edges while positioning important information
* within the specified safe area. */
void (*displaySafeArea)( void *userData, int x, int y, int width, int height );
} EssSettingsListener;
typedef struct _EssTerminateListener
{
/*
* Called to notify the application that it should shut down. If this
* callback is invoked it means some abnormal condition has occurred and
* the application should shut down.
*/
void (*terminated)( void *userData );
} EssTerminateListener;
/**
* EssContextCreate
*
* Create an Essos application context.
*/
EssCtx* EssContextCreate();
/**
* EssContextDestroy
*
* Destroy an application instance. If the application is running
* it will be stopped. All resources will be freed.
*/
void EssContextDestroy( EssCtx *ctx );
/**
* EssContextGetLastErrorDetail
*
* Returns a null terminated string giving information about the
* last error that has occurred. If any Essos API fails, this method
* can be used to obtain a string that should be logged.
*/
const char *EssContextGetLastErrorDetail( EssCtx *ctx );
/**
* EssContextInit
*
* Initialize an application context. Initialization will be performed
* by EssContextStart, but for use cases where it is not desired to start
* an application context, EssContextInit must be called before methods
* such as EssContextGetEGLDisplayType or EssContextCreateNativeWindow
* can be called.
*/
bool EssContextInit( EssCtx *ctx );
/**
* EssContextGetEGLDisplayType
*
* Returns a NativeDisplayType value that can be used in an eglGetDisplay call. This
* API is for applications that wish to create their own EGL environment rather than
* allowing Essos to do so automatically.
*/
bool EssContextGetEGLDisplayType( EssCtx *ctx, NativeDisplayType *displayType );
/**
* EssContextCreateNativeWindow
*
* Creates a NativeWindowType value that can be used in an eglCreateWindowSurface call. Passing
* NULL for the nativeWindow pointer causes the EGL environment to be automatically
* set up during the call to EssContextStart.
*/
bool EssContextCreateNativeWindow( EssCtx *ctx, int width, int height, NativeWindowType *nativeWindow );
/**
* EssContextDestroyNativeWindow
*
* Destroys a NativeWindowType value obtained from EssContextCreateNativeWindow.
*/
bool EssContextDestroyNativeWindow( EssCtx *ctx, NativeWindowType nativeWindow );
/**
* EssContextSetKeyListener
*
* Set a key listener (see EssKeyListener) to receive key event callbacks. Key
* codes are Linux codes defined by linux/input.h
*/
bool EssContextSetKeyListener( EssCtx *ctx, void *userData, EssKeyListener *listener );
/**
* EssContextSetPointerListener
*
* Set a pointer listener (see EssPointerListener) to receive pointer event callbacks.
* Button codes are Linux codes defined by linux/input.h
*/
bool EssContextSetPointerListener( EssCtx *ctx, void *userData, EssPointerListener *listener );
/**
* EssContextSetTouchListener
*
* Set a touch listener (see EssTouchListener) to receive touch event callbacks.
*/
bool EssContextSetTouchListener( EssCtx *ctx, void *userData, EssTouchListener *listener );
/**
* EssContextSetSettingsListener
*
* Set a settings listener (see EssSettingsListener) to receive settings event callbacks.
*/
bool EssContextSetSettingsListener( EssCtx *ctx, void *userData, EssSettingsListener *listener );
/**
* EssContextSetTerminateListener
*
* Set a terminate listener (see EssTerminateListener) to receive a callback when the
* application is being terminated. The registered terminate listener will be invoked
* if some abnormal condition required the application to shutdown.
*/
bool EssContextSetTerminateListener( EssCtx *ctx, void *userData, EssTerminateListener *listener );
/**
* EssContextSetName
*
* Establish the name of the application context. This must be called
* before initializing or starting the application.
*/
bool EssContextSetName( EssCtx *ctx, const char *name );
/**
* EssContextGetDisplaySize
*
* Returns the width and height of the display.
*/
bool EssContextGetDisplaySize( EssCtx *ctx, int *width, int *height );
/**
* EssContextGetDisplaySafeArea
*
* Returns a rectangle giving the display safe area. This is the region of the display
* that is guaranteed to be visible to the user and not hidden by overscan.
*/
bool EssContextGetDisplaySafeArea( EssCtx *ctx, int *x, int *y, int *width, int *height );
/**
* EssContextStart
*
* Start an application context running. Context initialization will be performed by this call
* if it has not already been done with EssContextInit. For applications that allow Essos to perform EGL
* setup, the EGL environment will be active after calling this method. GLES2 rendering can then
* be performed on this thread with buffer flips triggered by calls to EssContextUpdateDisplay. For
* applications that manually perform EGL creation, the EGL creation must be done between calling
* EssContextInit and EssContextStart. When manual EGL setup is done (EssContextCreateNativeWindow has
* been called), EssContextStart will skip automatic EGL setup. EssContextStart also performs
* the setup required for user input and any other needed initialization.
*
* While running, the EssContextRunEventLoopOnce method must be called regularly.
*/
bool EssContextStart( EssCtx *ctx );
/**
* EssContextStop
*
* Stop an application context.
*/
void EssContextStop( EssCtx *ctx );
/**
* EssContextResizeWindow
*
* Set a new window size. This API may be called in response to a display size notification received
* via an Essos settings listener.
*/
bool EssContextResizeWindow( EssCtx *ctx, int width, int height );
/**
* EssContextRunEventLoopOnce
*
* Perform event processing. This API will not block if no events are pending.
* It must be called regularly while the application is running.
*/
void EssContextRunEventLoopOnce( EssCtx *ctx );
/**
* EssContextUpdateDisplay
*
* Perform a buffer flip operation.
*/
void EssContextUpdateDisplay( EssCtx *ctx );
#if defined(__cplusplus)
} //extern "C"
#endif
#endif
| ZbyszekK/meta-dac-sdk-1 |
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignAndMigrateViaReplacementSpec.groovy<|end_filename|>
package nebula.plugin.resolutionrules
import spock.lang.Unroll
class AlignAndMigrateViaReplacementSpec extends AbstractAlignAndMigrateSpec {
@Unroll
def 'align and migrate via replacement'() {
given:
createAlignAndReplaceRules(['other:e': 'test.nebula:c'])
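// The replace rule migrates other:e onto test.nebula:c, but replacement alone does not
// bring c up to the aligned version unless c is also a direct dependency (second test below)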
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
results.output.contains("test.nebula:a:1.0.0 -> $alignedVersion")
results.output.contains("test.nebula:b:$alignedVersion")
results.output.contains("other:e:4.0.0 -> test.nebula:c:1.0.1") // not aligned :/
results.output.contains("belongs to platform aligned-platform")
when:
def dependenciesTasks = ['dependencies', '--configuration', 'compileClasspath']
def resultsForDependencies = runTasks(*dependenciesTasks)
then:
resultsForDependencies.output.contains("other:e:4.0.0 -> test.nebula:c:1.0.1") // not aligned :/
}
@Unroll
def 'align and migrate via replacement with brought in dependency as direct as well'() {
given:
createAlignAndReplaceRules(['other:e': 'test.nebula:c'])
buildFile << """
dependencies {
implementation 'test.nebula:c:1.0.1'
}
"""
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
results.output.contains("test.nebula:a:1.0.0 -> $alignedVersion")
results.output.contains("test.nebula:b:$alignedVersion")
results.output.contains("other:e:4.0.0 -> test.nebula:c:$alignedVersion")
results.output.contains("belongs to platform aligned-platform")
when:
def dependenciesTasks = ['dependencies', '--configuration', 'compileClasspath']
def resultsForDependencies = runTasks(*dependenciesTasks)
then:
resultsForDependencies.output.contains("other:e:4.0.0 -> test.nebula:c:$alignedVersion")
}
}
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignRulesForceSpec.groovy<|end_filename|>
package nebula.plugin.resolutionrules
import nebula.test.dependencies.DependencyGraphBuilder
import nebula.test.dependencies.GradleDependencyGenerator
import spock.lang.Unroll
class AlignRulesForceSpec extends AbstractAlignRulesSpec {
def setup() {
keepFiles = true
debug = true
}
@Unroll
def 'alignment uses #name forced version'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:0.15.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:0.15.0')
.addModule('test.nebula:c:1.0.0')
.addModule('test.nebula:c:0.15.0')
.addModule('test.nebula.other:a:1.0.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b:1.0.0'
implementation 'test.nebula:c:0.15.0'
implementation 'test.nebula.other:a:1.0.0'
}
$force
""".stripIndent()
when:
def tasks = ['dependencies', '--configuration', 'compileClasspath', '--warning-mode', 'none']
def result = runTasks(*tasks)
then:
result.output.contains '+--- test.nebula:a:1.0.0 -> 0.15.0\n'
result.output.contains '+--- test.nebula:b:1.0.0 -> 0.15.0\n'
result.output.contains '+--- test.nebula:c:0.15.0\n'
result.output.contains '--- test.nebula.other:a:1.0.0\n'
where:
name | force
"all" | "configurations.all { resolutionStrategy { force 'test.nebula:a:0.15.0' } }"
"configuration" | "configurations.compileClasspath { resolutionStrategy { force 'test.nebula:a:0.15.0' } }"
"dependency" | "dependencies { implementation ('test.nebula:a:0.15.0') { force = true } }"
}
@Unroll
def 'when multiple forces are present then Core alignment fails due to multiple forces'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:2.0.0')
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:0.15.0')
.addModule('test.nebula:b:2.0.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:0.15.0')
.addModule('test.nebula:c:2.0.0')
.addModule('test.nebula:c:1.0.0')
.addModule('test.nebula:c:0.15.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:2.0.0'
implementation 'test.nebula:b:2.0.0'
implementation 'test.nebula:c:1.0.0'
}
configurations.compileClasspath.resolutionStrategy {
force 'test.nebula:a:2.0.0'
force 'test.nebula:b:1.0.0'
force 'test.nebula:c:0.15.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath', '--warning-mode', 'none')
def dependencyInsightResult = runTasks('dependencyInsight', '--dependency', 'test.nebula', '--warning-mode', 'none')
then:
assert dependencyInsightResult.output.contains('Multiple forces on different versions for virtual platform ')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:a:2.0.0')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:b:2.0.0')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:c:1.0.0')
}
@Unroll
def 'when dynamic forces are present then Core alignment fails due to multiple forces'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:2.0.0')
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:0.15.0')
.addModule('test.nebula:b:2.0.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:0.15.0')
.addModule('test.nebula:c:2.0.0')
.addModule('test.nebula:c:1.0.0')
.addModule('test.nebula:c:0.15.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:2.0.0'
implementation 'test.nebula:b:2.0.0'
implementation 'test.nebula:c:1.0.0'
}
configurations.compileClasspath.resolutionStrategy {
force 'test.nebula:a:latest.release'
force 'test.nebula:b:1.+'
force 'test.nebula:c:0.15.0'
}
""".stripIndent()
when:
def tasks = ['dependencies', '--configuration', 'compileClasspath', '--warning-mode', 'none']
def result = runTasks(*tasks)
def dependencyInsightResult = runTasks('dependencyInsight', '--dependency', 'test.nebula', '--warning-mode', 'none')
then:
assert dependencyInsightResult.output.contains('Multiple forces on different versions for virtual platform ')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:a:2.0.0')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:b:2.0.0')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:c:1.0.0')
}
@Unroll
def 'alignment with latest.release force'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:2.0.0')
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:0.15.0')
.addModule('test.nebula:b:2.0.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:0.15.0')
.addModule('test.nebula:c:2.0.0')
.addModule('test.nebula:c:1.0.0')
.addModule('test.nebula:c:0.15.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:2.0.0'
implementation 'test.nebula:b:1.0.0'
implementation 'test.nebula:c:0.15.0'
}
configurations.compileClasspath.resolutionStrategy {
force 'test.nebula:a:latest.release'
}
""".stripIndent()
when:
def tasks = ['dependencies', '--configuration', 'compileClasspath', '--warning-mode', 'none']
def result = runTasks(*tasks)
then:
result.output.contains '+--- test.nebula:a:2.0.0\n'
result.output.contains '+--- test.nebula:b:1.0.0 -> 2.0.0\n'
result.output.contains '\\--- test.nebula:c:0.15.0 -> 2.0.0\n'
}
@Unroll
def 'alignment with sub-version force'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:2.0.0')
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:0.15.0')
.addModule('test.nebula:b:2.0.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:0.15.0')
.addModule('test.nebula:c:2.0.0')
.addModule('test.nebula:c:1.0.0')
.addModule('test.nebula:c:0.15.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:2.0.0'
implementation 'test.nebula:b:1.0.0'
implementation 'test.nebula:c:0.15.0'
}
configurations.compileClasspath.resolutionStrategy {
force 'test.nebula:a:1.+'
}
""".stripIndent()
when:
def tasks = ['dependencies', '--configuration', 'compileClasspath', '--warning-mode', 'none']
def result = runTasks(*tasks)
then:
result.output.contains '+--- test.nebula:a:2.0.0 -> 1.0.0\n'
result.output.contains '+--- test.nebula:b:1.0.0\n'
result.output.contains '\\--- test.nebula:c:0.15.0 -> 1.0.0\n'
}
@Unroll
def 'with multiple specific dynamic versions then Core alignment fails due to multiple forces'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:3.0.0')
.addModule('test.nebula:a:2.0.0')
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:0.15.0')
.addModule('test.nebula:b:2.0.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:0.15.0')
.addModule('test.nebula:c:2.0.0')
.addModule('test.nebula:c:1.0.0')
.addModule('test.nebula:c:0.15.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:2.0.0'
implementation 'test.nebula:b:1.0.0'
implementation 'test.nebula:c:0.15.0'
}
configurations.compileClasspath.resolutionStrategy {
force 'test.nebula:a:latest.release'
force 'test.nebula:b:1.+'
force 'test.nebula:c:[1.0, 2.0)'
}
""".stripIndent()
when:
def tasks = ['dependencies', '--configuration', 'compileClasspath', '--warning-mode', 'none']
def result = runTasks(*tasks)
def dependencyInsightResult = runTasks('dependencyInsight', '--dependency', 'test.nebula', '--warning-mode', 'none')
then:
assert dependencyInsightResult.output.contains('Multiple forces on different versions for virtual platform ')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:a:2.0.0')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:b:1.0.0')
assert dependencyInsightResult.output.contains('Could not resolve test.nebula:c:0.15.0')
}
}
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignRulesPluginInteractionSpec.groovy<|end_filename|>
/*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package nebula.plugin.resolutionrules
import nebula.test.IntegrationTestKitSpec
import nebula.test.dependencies.DependencyGraphBuilder
import nebula.test.dependencies.GradleDependencyGenerator
import org.gradle.util.GradleVersion
import spock.lang.IgnoreIf
import spock.lang.Issue
import spock.lang.Unroll
import java.util.jar.Attributes
import java.util.jar.JarEntry
import java.util.jar.JarOutputStream
import java.util.jar.Manifest
class AlignRulesPluginInteractionSpec extends IntegrationTestKitSpec {
def setup() {
definePluginOutsideOfPluginBlock = true
debug = true
keepFiles = true
}
@Unroll
def 'alignment interaction with dependency-recommender'() {
def graph = new DependencyGraphBuilder()
.addModule('test.a:a:1.42.2')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:nebula-dependency-recommender:10.0.1'
}
}
apply plugin: 'nebula.resolution-rules'
apply plugin: 'java'
apply plugin: 'nebula.dependency-recommender'
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencyRecommendations {
map recommendations: ['test.a:a': '1.42.2']
}
dependencies {
resolutionRules files('$rulesJsonFile')
implementation 'test.a:a'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
result.output.contains '\\--- test.a:a -> 1.42.2\n'
}
@Unroll
def 'alignment interaction with dependency-recommender reverse order of application'() {
def graph = new DependencyGraphBuilder()
.addModule('test.a:a:1.42.2')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:nebula-dependency-recommender:10.0.1'
}
}
apply plugin: 'nebula.dependency-recommender'
apply plugin: 'nebula.resolution-rules'
apply plugin: 'java'
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencyRecommendations {
map recommendations: ['test.a:a': '1.42.2']
}
dependencies {
resolutionRules files('$rulesJsonFile')
implementation 'test.a:a'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
result.output.contains '\\--- test.a:a -> 1.42.2\n'
}
@Unroll
def 'alignment interaction with dependency-recommender transitive project dependencies'() {
def graph = new DependencyGraphBuilder()
.addModule('test.a:a:1.42.2')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
addSubproject('a')
addSubproject('b')
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:nebula-dependency-recommender:10.0.1'
}
}
allprojects {
apply plugin: 'nebula.dependency-recommender'
apply plugin: 'nebula.resolution-rules'
repositories {
${mavenrepo.mavenRepositoryBlock}
}
}
dependencies {
resolutionRules files('$rulesJsonFile')
}
subprojects {
dependencyRecommendations {
map recommendations: ['test.a:a': '1.42.2']
}
}
project(':a') {
apply plugin: 'java'
dependencies {
implementation project(':b')
}
}
project(':b') {
apply plugin: 'java-library'
dependencies {
api 'test.a:a'
}
}
""".stripIndent()
when:
def result = runTasks(':a:dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
result.output.contains '\\--- test.a:a -> 1.42.2\n'
}
@Unroll
// Spring Boot plugin 1.x uses the removed 'runtime' configuration, so it cannot be used with Gradle 7.0 or later unless backported
@IgnoreIf({ GradleVersion.current().baseVersion >= GradleVersion.version("7.0")})
def 'align rules work with spring-boot version #springVersion '() {
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath('org.springframework.boot:spring-boot-gradle-plugin:${springVersion}')
}
}
apply plugin: 'spring-boot'
apply plugin: 'nebula.resolution-rules'
repositories { mavenCentral() }
dependencies {
resolutionRules files('$rulesJsonFile')
implementation('org.springframework.boot:spring-boot-starter-web')
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath', '--warning-mode', 'none', "-Dnebula.features.coreLockingSupport=false")
then:
noExceptionThrown()
where:
springVersion = '1.4.0.RELEASE'
}
@Unroll
// Spring Boot plugin 1.x uses the removed 'runtime' configuration, so it cannot be used with Gradle 7.0 or later unless backported
@IgnoreIf({ GradleVersion.current().baseVersion >= GradleVersion.version("7.0")})
def 'spring-boot interaction for version #springVersion '() {
def rulesFolder = new File(projectDir, 'rules')
rulesFolder.mkdirs()
def rulesJsonFile = new File(rulesFolder, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
def mavenForRules = new File(projectDir, 'repo')
mavenForRules.mkdirs()
def locked = new File(mavenForRules, 'test/rules/resolution-rules/1.0.0')
locked.mkdirs()
createRulesJar([rulesFolder], projectDir, new File(locked, 'resolution-rules-1.0.0.jar'))
createPom('test.rules', 'resolution-rules', '1.0.0', locked)
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath('org.springframework.boot:spring-boot-gradle-plugin:${springVersion}')
}
}
apply plugin: 'spring-boot'
apply plugin: 'nebula.resolution-rules'
repositories {
mavenCentral()
maven { url '${mavenForRules.absolutePath}' }
}
dependencies {
resolutionRules 'test.rules:resolution-rules:1.0.0'
implementation 'org.springframework.boot:spring-boot-starter-web'
}
""".stripIndent()
writeHelloWorld('example')
when:
def result = runTasks('compileJava', '--warning-mode', 'none', "-Dnebula.features.coreLockingSupport=false")
then:
noExceptionThrown()
where:
springVersion = '1.4.0.RELEASE'
}
@Unroll
def 'transitive aligns with spring dependency management'() {
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "align-aws-java-sdk",
"group": "com.amazonaws",
"includes": ["aws-java-sdk", "aws-java-sdk-.*"],
"excludes": ["aws-java-sdk-(handwritten-samples|sample-extractor|samples-pom|generated-samples|samples|archetype|swf-libraries)"],
"reason": "Align AWS Java SDK libraries",
"author": "<NAME> <<EMAIL>>",
"date": "2016-04-28T22:31:14.321Z"
}
]
}
'''.stripIndent()
buildFile << """\
buildscript {
dependencies {
classpath 'io.spring.gradle:dependency-management-plugin:0.6.1.RELEASE'
}
repositories {
mavenCentral()
}
}
apply plugin: 'java'
apply plugin: 'nebula.resolution-rules'
apply plugin: 'io.spring.dependency-management'
dependencyManagement {
imports {
mavenBom 'org.springframework.boot:spring-boot-starter-parent:1.4.3.RELEASE'
mavenBom 'org.springframework.cloud:spring-cloud-dependencies:Camden.SR3'
}
}
repositories {
mavenCentral()
}
dependencies {
resolutionRules files('$rulesJsonFile')
implementation 'com.amazonaws:aws-java-sdk-s3'
implementation 'com.netflix.servo:servo-aws:0.12.12'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
result.output.contains('+--- com.amazonaws:aws-java-sdk-s3 -> 1.11.18')
}
@Unroll
def 'publishing and dependency-recommender interacting with resolution-rules'() {
def graph = new DependencyGraphBuilder()
.addModule('test.a:a:1.42.2')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:nebula-dependency-recommender:10.0.1'
classpath 'com.netflix.nebula:nebula-publishing-plugin:17.3.2'
}
}
apply plugin: 'nebula.dependency-recommender'
apply plugin: 'nebula.maven-publish'
apply plugin: 'nebula.resolution-rules'
apply plugin: 'java'
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencyRecommendations {
map recommendations: ['test.a:a': '1.42.2', 'test.a:b': '1.2.1']
}
dependencies {
resolutionRules files('$rulesJsonFile')
implementation 'test.a:a'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
result.output.contains '\\--- test.a:a -> 1.42.2\n'
}
@Unroll
def 'cycle like behavior'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:c:1.42.2')
.addModule('test.nebula:d:1.2.1')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
]
}
'''.stripIndent()
buildFile << """\
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath 'com.netflix.nebula:nebula-publishing-plugin:17.3.2'
}
}
allprojects {
apply plugin: 'nebula.resolution-rules'
repositories {
${mavenrepo.mavenRepositoryBlock}
}
}
dependencies {
resolutionRules files('$rulesJsonFile')
}
subprojects {
apply plugin: 'nebula.maven-publish'
apply plugin: 'java'
}
""".stripIndent()
def aDir = addSubproject('a', '''\
dependencies {
implementation 'test.nebula:c:1.+'
testImplementation project(':b')
}
'''.stripIndent())
def bDir = addSubproject('b', '''\
dependencies {
implementation 'test.nebula:d:[1.0.0, 2.0.0)'
implementation project(':a')
}
'''.stripIndent())
when:
def results = runTasks(':a:dependencies', ':b:dependencies', 'assemble', "-Dnebula.features.coreLockingSupport=false")
then:
noExceptionThrown()
}
@Unroll
def 'able to lock rules'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.41.5')
.addModule('test.nebula:a:1.42.2')
.addModule('test.nebula:b:1.41.5')
.addModule('test.nebula:b:1.42.2')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "$projectDir/testrepogen")
mavenrepo.generateTestMavenRepo()
def rulesFolder = new File(projectDir, 'rules')
rulesFolder.mkdirs()
def rulesJsonFile = new File(rulesFolder, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
def mavenForRules = new File(projectDir, 'repo')
mavenForRules.mkdirs()
def locked = new File(mavenForRules, 'test/rules/resolution-rules/1.0.0')
locked.mkdirs()
createRulesJar([rulesFolder], projectDir, new File(locked, 'resolution-rules-1.0.0.jar'))
createPom('test.rules', 'resolution-rules', '1.0.0', locked)
rulesJsonFile.text = '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [], "align": []
}
'''.stripIndent()
def newer = new File(mavenForRules, 'test/rules/resolution-rules/1.1.0')
newer.mkdirs()
createRulesJar([rulesFolder], projectDir, new File(newer, 'resolution-rules-1.1.0.jar'))
createPom('test.rules', 'resolution-rules', '1.1.0', newer)
def dependencyLock = new File(projectDir, 'dependencies.lock')
dependencyLock << '''\
{
"resolutionRules": {
"test.rules:resolution-rules": { "locked": "1.0.0" }
}
}
'''.stripIndent()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:11.+'
}
}
apply plugin: 'nebula.resolution-rules'
apply plugin: 'nebula.dependency-lock'
apply plugin: 'java'
repositories {
${mavenrepo.mavenRepositoryBlock}
maven { url '${mavenForRules.absolutePath}' }
}
dependencies {
resolutionRules 'test.rules:resolution-rules:1.+'
implementation 'test.nebula:a:1.41.5'
implementation 'test.nebula:b:1.42.2'
}
""".stripIndent()
when:
def results = runTasks('dependencyInsight', '--dependency', 'a', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
def resultsForRules = runTasks('dependencyInsight', '--dependency', 'test.rules', '--configuration', 'resolutionRules', "-Dnebula.features.coreLockingSupport=false")
then:
results.output.contains 'test.nebula:a:1.41.5 -> 1.42.2\n'
results.output.contains 'test.nebula:b:1.42.2\n'
resultsForRules.output.contains 'test.rules:resolution-rules:1.+ -> 1.0.0\n'
resultsForRules.output.contains 'Selected by rule : test.rules:resolution-rules locked to 1.0.0'
}
@Unroll
def 'dependency-lock, when applied after, wins out over new locked alignment rules'() {
def (GradleDependencyGenerator mavenrepo, File mavenForRules, File jsonRuleFile) = dependencyLockAlignInteractionSetupWithLockedResolutionRulesConfiguration()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:11.+'
}
}
apply plugin: 'nebula.resolution-rules'
apply plugin: 'nebula.dependency-lock'
apply plugin: 'java'
repositories {
${mavenrepo.mavenRepositoryBlock}
maven { url '${mavenForRules.absolutePath}' }
}
dependencies {
resolutionRules 'test.rules:resolution-rules:1.+'
implementation 'test.nebula:a:1.41.5'
implementation 'test.nebula:b:1.42.2'
}
""".stripIndent()
when:
def results = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
def resultsForRules = runTasks('dependencies', '--configuration', 'resolutionRules', "-Dnebula.features.coreLockingSupport=false")
then:
// results using resolution rules that do not yet align test.nebula
results.output.contains 'test.nebula:a:1.41.5\n'
results.output.contains 'test.nebula:b:1.42.2\n'
resultsForRules.output.contains 'test.rules:resolution-rules:1.+ -> 1.0.0\n'
when:
def resultsIgnoringLocks = runTasks('-PdependencyLock.ignore=true', 'dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
def resultsForRulesIgnoringLocks = runTasks('-PdependencyLock.ignore=true', 'dependencies', '--configuration', 'resolutionRules', "-Dnebula.features.coreLockingSupport=false")
then:
// final results if we ignore locks
resultsIgnoringLocks.output.contains 'test.nebula:a:1.41.5 -> 1.42.2\n'
resultsIgnoringLocks.output.contains 'test.nebula:b:1.42.2\n'
resultsForRulesIgnoringLocks.output.contains 'test.rules:resolution-rules:1.+ -> 1.1.0\n'
}
@Unroll
def 'dependency-lock plugin applied after resolution-rules plugin with non-locked resolution rules - fails because dependency lock state is out of date'() {
// note: this is a more unusual case. Typically, resolution rules are distributed like a library: version-controlled and locked like other dependencies
def (GradleDependencyGenerator mavenrepo, File rulesJsonFile) = dependencyLockAlignInteractionSetupWithUnlockedResolutionRulesConfiguration()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:11.+'
}
}
apply plugin: 'nebula.resolution-rules'
apply plugin: 'nebula.dependency-lock'
apply plugin: 'java'
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencies {
resolutionRules files('$rulesJsonFile')
implementation 'test.nebula:a:1.41.5'
implementation 'test.nebula:b:1.42.2'
}
""".stripIndent()
when:
def results
def tasks = ['dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreAlignmentSupport=true", "-Dnebula.features.coreLockingSupport=false"]
results = runTasksAndFail(*tasks)
then:
assert results.output.contains('Dependency lock state is out of date:')
assert results.output.contains("Resolved 'test.nebula:a:1.42.2' instead of locked version '1.41.5' for project")
assert results.output.contains('+--- test.nebula:a:1.41.5 -> 1.42.2\n')
assert results.output.contains('\\--- test.nebula:b:1.42.2\n')
when:
def ignoreLocksResults = runTasks('dependencies', '--configuration', 'compileClasspath', '-PdependencyLock.ignore=true', "-Dnebula.features.coreAlignmentSupport=true", "-Dnebula.features.coreLockingSupport=false")
then:
ignoreLocksResults.output.contains '+--- test.nebula:a:1.41.5 -> 1.42.2\n'
ignoreLocksResults.output.contains '\\--- test.nebula:b:1.42.2\n'
when:
runTasks('generateLock', 'saveLock')
def locksUpdatedResults = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
locksUpdatedResults.output.contains '+--- test.nebula:a:1.41.5 -> 1.42.2\n'
locksUpdatedResults.output.contains '\\--- test.nebula:b:1.42.2\n'
}
@Unroll
def 'dependency-lock causes alignment to short-circuit if dependencies are aligned by the lock file'() {
def (GradleDependencyGenerator mavenrepo, File jsonRuleFile) = dependencyLockAlignInteractionSetupWithUnlockedResolutionRulesConfiguration()
assert jsonRuleFile.exists()
assert jsonRuleFile.text.contains('"group": "test.nebula"')
def dependencyLock = new File(projectDir, 'dependencies.lock')
dependencyLock.delete()
dependencyLock << '''\
{
"compileClasspath": {
"test.nebula:a": { "locked": "1.41.5" },
"test.nebula:b": { "locked": "1.41.5" }
}
}
'''.stripIndent()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:11.+'
}
}
apply plugin: 'nebula.resolution-rules'
apply plugin: 'nebula.dependency-lock'
apply plugin: 'java'
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencies {
resolutionRules files('$jsonRuleFile')
implementation 'test.nebula:a:1.41.5'
implementation 'test.nebula:b:1.42.2'
}
""".stripIndent()
when:
def results = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
!results.output.contains('aligning test.nebula:a to [1.41.5,1.42.2]')
results.output.contains '+--- test.nebula:a:1.41.5\n'
results.output.contains '\\--- test.nebula:b:1.42.2 -> 1.41.5'
}
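// Builds the fixture for the "locked resolution rules" interaction tests: a Maven repo
// with test.nebula:a/b at 1.41.5 and 1.42.2, the rules artifact test.rules:resolution-rules
// published at 1.0.0 (no align rules) and 1.1.0 (aligns test.nebula) plus maven-metadata.xml,
// and a dependencies.lock pinning the rules artifact to 1.0.0 and a/b to unaligned versions.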
private List dependencyLockAlignInteractionSetupWithLockedResolutionRulesConfiguration() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.41.5')
.addModule('test.nebula:a:1.42.2')
.addModule('test.nebula:b:1.41.5')
.addModule('test.nebula:b:1.42.2')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "$projectDir/testrepogen")
mavenrepo.generateTestMavenRepo()
def rulesFolder = new File(projectDir, 'rules')
rulesFolder.mkdirs()
def rulesJsonFile = new File(rulesFolder, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [], "align": []
}
'''.stripIndent()
def mavenForRules = new File(projectDir, 'repo')
mavenForRules.mkdirs()
def locked = new File(mavenForRules, 'test/rules/resolution-rules/1.0.0')
locked.mkdirs()
createRulesJar([rulesFolder], projectDir, new File(locked, 'resolution-rules-1.0.0.jar'))
createPom('test.rules', 'resolution-rules', '1.0.0', locked)
rulesJsonFile.text = '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
def newer = new File(mavenForRules, 'test/rules/resolution-rules/1.1.0')
newer.mkdirs()
createRulesJar([rulesFolder], projectDir, new File(newer, 'resolution-rules-1.1.0.jar'))
createPom('test.rules', 'resolution-rules', '1.1.0', newer)
def mavenMetadataXml = new File(mavenForRules, 'test/rules/resolution-rules/maven-metadata.xml')
mavenMetadataXml.createNewFile()
mavenMetadataXml << '''<?xml version="1.0" encoding="UTF-8"?>
<metadata>
<groupId>test.rules</groupId>
<artifactId>resolution-rules</artifactId>
<versioning>
<latest>1.1.0</latest>
<release>1.1.0</release>
<versions>
<version>1.0.0</version>
<version>1.1.0</version>
</versions>
<lastUpdated>20200320014943</lastUpdated>
</versioning>
</metadata>
'''
def dependencyLock = new File(projectDir, 'dependencies.lock')
dependencyLock << '''\
{
"compileClasspath": {
"test.nebula:a": { "locked": "1.41.5" },
"test.nebula:b": { "locked": "1.42.2" }
},
"resolutionRules": {
"test.rules:resolution-rules": { "locked": "1.0.0" }
}
}
'''.stripIndent()
[mavenrepo, mavenForRules, rulesJsonFile]
}
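// Builds the fixture for the "non-locked resolution rules" interaction tests: the same
// test.nebula:a/b repo, a local rules.json that aligns test.nebula, and a dependencies.lock
// pinning a to 1.41.5 and b to 1.42.2 (deliberately unaligned, so alignment changes the graph).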
private List dependencyLockAlignInteractionSetupWithUnlockedResolutionRulesConfiguration() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.41.5')
.addModule('test.nebula:a:1.42.2')
.addModule('test.nebula:b:1.41.5')
.addModule('test.nebula:b:1.42.2')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "$projectDir/testrepogen")
mavenrepo.generateTestMavenRepo()
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
def dependencyLock = new File(projectDir, 'dependencies.lock')
dependencyLock << '''\
{
"compileClasspath": {
"test.nebula:a": { "locked": "1.41.5" },
"test.nebula:b": { "locked": "1.42.2" }
}
}
'''.stripIndent()
[mavenrepo, rulesJsonFile]
}
@Unroll
@Issue('#55')
def 'alignment does not infinitely loop on force to non-existent version'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:0.15.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:0.15.0')
.addModule('test.nebula:c:0.15.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
addSubproject('common', '''\
apply plugin: 'java-library'
dependencyRecommendations {
map recommendations: [
'test.nebula:a': '0.15.0',
'test.nebula:b': '0.15.0',
'test.nebula:c': '0.15.0'
]
}
dependencies {
api 'test.nebula:a'
api 'test.nebula:b'
api 'test.nebula:c'
}
'''.stripIndent())
addSubproject('app', '''\
apply plugin: 'java'
configurations.compileClasspath.resolutionStrategy {
force 'test.nebula:c:1.0.0'
}
dependencies {
implementation project(':common')
implementation 'test.nebula:a:1.0.0'
}
'''.stripIndent())
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies { classpath 'com.netflix.nebula:nebula-dependency-recommender:10.0.1' }
}
allprojects {
apply plugin: 'nebula.resolution-rules'
apply plugin: 'nebula.dependency-recommender'
}
dependencies {
resolutionRules files('$rulesJsonFile')
}
subprojects {
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
}
""".stripIndent()
when:
def result = runTasks(':app:dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
result.output.contains '| +--- test.nebula:b FAILED'
result.output.contains '| \\--- test.nebula:c -> 1.0.0 FAILED'
}
@Unroll
@Issue('#55')
def 'alignment does not infinitely loop on force to non-existent version with recommender strictMode'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:0.15.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:0.15.0')
.addModule('test.nebula:c:0.15.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
def rulesJsonFile = new File(projectDir, 'rules.json')
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
addSubproject('common', '''\
apply plugin: 'java-library'
dependencyRecommendations {
map recommendations: [
'test.nebula:a': '0.15.0',
'test.nebula:b': '0.15.0',
'test.nebula:c': '0.15.0'
]
}
dependencies {
api 'test.nebula:a'
api 'test.nebula:b'
api 'test.nebula:c'
}
'''.stripIndent())
addSubproject('app', '''\
apply plugin: 'java'
configurations.compileClasspath.resolutionStrategy {
force 'test.nebula:c:1.0.0'
}
dependencies {
implementation project(':common')
implementation 'test.nebula:a:1.0.0'
}
'''.stripIndent())
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies { classpath 'com.netflix.nebula:nebula-dependency-recommender:10.0.1' }
}
allprojects {
apply plugin: 'nebula.resolution-rules'
apply plugin: 'nebula.dependency-recommender'
dependencyRecommendations {
strictMode = true
}
}
dependencies {
resolutionRules files('$rulesJsonFile')
}
subprojects {
apply plugin: 'java'
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
}
""".stripIndent()
when:
def result = runTasks(':app:dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
result.output.contains '| +--- test.nebula:b FAILED'
result.output.contains '| \\--- test.nebula:c -> 1.0.0 FAILED'
def expectedMessage = 'Dependency test.nebula:a omitted version with no recommended version. General causes include a dependency being removed from the recommendation source or not applying a recommendation source to a project that depends on another project using a recommender.'
result.output.contains(expectedMessage)
}
@Unroll
def 'dependency-lock, when applied before, wins out over new locked alignment rules'() {
def (GradleDependencyGenerator mavenrepo, File mavenForRules, File jsonRuleFile) = dependencyLockAlignInteractionSetupWithLockedResolutionRulesConfiguration()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:11.+'
}
}
apply plugin: 'nebula.dependency-lock'
apply plugin: 'nebula.resolution-rules'
apply plugin: 'java'
repositories {
${mavenrepo.mavenRepositoryBlock}
maven { url '${mavenForRules.absolutePath}' }
}
dependencies {
resolutionRules 'test.rules:resolution-rules:1.+'
implementation 'test.nebula:a:1.41.5'
implementation 'test.nebula:b:1.42.2'
}
""".stripIndent()
when:
def results = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
def resultsForRules = runTasks('dependencies', '--configuration', 'resolutionRules', "-Dnebula.features.coreLockingSupport=false")
then:
// results using resolution rules that do not yet align test.nebula
results.output.contains 'test.nebula:a:1.41.5\n'
results.output.contains 'test.nebula:b:1.42.2\n'
resultsForRules.output.contains 'test.rules:resolution-rules:1.+ -> 1.0.0\n'
when:
def resultsIgnoringLocks = runTasks('-PdependencyLock.ignore=true', 'dependencies', '--configuration', 'compileClasspath')
def resultsForRulesIgnoringLocks = runTasks('-PdependencyLock.ignore=true', 'dependencies', '--configuration', 'resolutionRules')
then:
// final results if we ignore locks
resultsIgnoringLocks.output.contains 'test.nebula:a:1.41.5 -> 1.42.2\n'
resultsIgnoringLocks.output.contains 'test.nebula:b:1.42.2\n'
resultsForRulesIgnoringLocks.output.contains 'test.rules:resolution-rules:1.+ -> 1.1.0\n'
}
@Unroll
def 'dependency-lock plugin applied before resolution-rules plugin with non-locked resolution rules - fails because dependency lock state is out of date'() {
// note: this is a more unusual case. Typically, resolution rules are distributed like a library: version-controlled and locked like other dependencies
def (GradleDependencyGenerator mavenrepo, File rulesJsonFile) = dependencyLockAlignInteractionSetupWithUnlockedResolutionRulesConfiguration()
buildFile << """\
buildscript {
repositories { mavenCentral() }
dependencies {
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:11.+'
}
}
apply plugin: 'nebula.dependency-lock'
apply plugin: 'nebula.resolution-rules'
apply plugin: 'java'
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencies {
resolutionRules files('$rulesJsonFile')
implementation 'test.nebula:a:1.41.5'
implementation 'test.nebula:b:1.42.2'
}
""".stripIndent()
when:
def tasks = ['dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreAlignmentSupport=true", "-Dnebula.features.coreLockingSupport=false"]
def results = runTasksAndFail(*tasks)
then:
assert results.output.contains('Dependency lock state is out of date:')
assert results.output.contains("Resolved 'test.nebula:a:1.42.2' instead of locked version '1.41.5' for project")
assert results.output.contains('+--- test.nebula:a:1.41.5 -> 1.42.2\n')
assert results.output.contains('\\--- test.nebula:b:1.42.2\n')
when:
def ignoreLocksResults = runTasks('dependencies', '--configuration', 'compileClasspath', '-PdependencyLock.ignore=true', "-Dnebula.features.coreAlignmentSupport=true", "-Dnebula.features.coreLockingSupport=false")
then:
ignoreLocksResults.output.contains '+--- test.nebula:a:1.41.5 -> 1.42.2\n'
ignoreLocksResults.output.contains '\\--- test.nebula:b:1.42.2\n'
!ignoreLocksResults.output.contains('FAILED')
when:
runTasks('generateLock', 'saveLock')
def locksUpdatedResults = runTasks('dependencies', '--configuration', 'compileClasspath', "-Dnebula.features.coreLockingSupport=false")
then:
locksUpdatedResults.output.contains '+--- test.nebula:a:1.41.5 -> 1.42.2\n'
locksUpdatedResults.output.contains '\\--- test.nebula:b:1.42.2\n'
}
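// Packages the given rule folders into a jar at 'destination', adding a manifest and
// stripping the 'unneededRoot' path prefix so entry names are relative (e.g. 'rules/rules.json').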
private createRulesJar(Collection<File> files, File unneededRoot, File destination) {
Manifest manifest = new Manifest()
manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, '1.0')
JarOutputStream target = new JarOutputStream(new FileOutputStream(destination), manifest)
files.each { add(it, unneededRoot, target) }
target.close()
}
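// Writes a minimal "<name>-<version>.pom" into 'dir' so the rules jar is resolvable
// from a plain directory-backed Maven repository.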
private createPom(String group, String name, String version, File dir) {
def pom = new File(dir, "${name}-${version}.pom")
pom.text = """\
<?xml version="1.0" encoding="UTF-8"?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>${group}</groupId>
<artifactId>${name}</artifactId>
<version>${version}</version>
</project>
""".stripIndent()
}
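// Recursively adds 'source' (file or directory) to the jar, naming each entry by its
// path relative to 'unneededRoot'; directory entries are given a trailing '/'.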
private void add(File source, File unneededRoot, JarOutputStream target) throws IOException {
def prefix = "${unneededRoot.path}/"
if (source.isDirectory()) {
String dirName = source.path - prefix
if (!dirName.endsWith('/')) {
dirName += '/'
}
def entry = new JarEntry(dirName)
target.putNextEntry(entry)
target.closeEntry()
source.listFiles().each { nested ->
add(nested, unneededRoot, target)
}
} else {
def entry = new JarEntry(source.path - prefix)
target.putNextEntry(entry)
target << source.bytes
target.closeEntry()
}
}
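// Illustrative usage of the helpers above (names hypothetical): with projectDir containing
// 'rules/rules.json' and versionDir = <repo>/test/rules/resolution-rules/1.0.0,
//   createRulesJar([new File(projectDir, 'rules')], projectDir, new File(versionDir, 'resolution-rules-1.0.0.jar'))
//   createPom('test.rules', 'resolution-rules', '1.0.0', versionDir)
// publishes an artifact whose jar the plugin scans for '.json' rule files.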
}
<|start_filename|>src/main/kotlin/nebula/plugin/resolutionrules/plugin.kt<|end_filename|>
/*
* Copyright 2015-2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package nebula.plugin.resolutionrules
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.kotlin.readValue
import com.netflix.nebula.interop.onExecute
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.artifacts.Configuration
import org.gradle.api.artifacts.ConfigurationContainer
import org.gradle.api.logging.Logger
import org.gradle.api.logging.Logging
import org.gradle.api.provider.Property
import org.gradle.api.provider.Provider
import org.gradle.api.services.BuildService
import org.gradle.api.services.BuildServiceParameters
import java.io.File
import java.io.Serializable
import java.util.zip.ZipEntry
import java.util.zip.ZipFile
import javax.inject.Inject
const val RESOLUTION_RULES_CONFIG_NAME = "resolutionRules"
class ResolutionRulesPlugin : Plugin<Project> {
private lateinit var project: Project
private lateinit var configurations: ConfigurationContainer
private lateinit var extension: NebulaResolutionRulesExtension
private val ignoredConfigurationPrefixes = listOf(RESOLUTION_RULES_CONFIG_NAME, SPRING_VERSION_MANAGEMENT_CONFIG_NAME,
NEBULA_RECOMMENDER_BOM_CONFIG_NAME, SCALA_INCREMENTAL_ANALYSIS_CONFIGURATION_PREFIX, KTLINT_CONFIGURATION_PREFIX, REPOSITORY_CONTENT_DESCRIPTOR_CONFIGURATION_PREFIX)
private val ignoredConfigurationSuffixes = listOf(PMD_CONFIGURATION_SUFFIX)
companion object {
val Logger: Logger = Logging.getLogger(ResolutionRulesPlugin::class.java)
const val NEBULA_RECOMMENDER_BOM_CONFIG_NAME: String = "nebulaRecommenderBom"
const val SPRING_VERSION_MANAGEMENT_CONFIG_NAME = "versionManagement"
const val KTLINT_CONFIGURATION_PREFIX = "ktlint"
const val PMD_CONFIGURATION_SUFFIX = "PmdAuxClasspath"
const val SCALA_INCREMENTAL_ANALYSIS_CONFIGURATION_PREFIX = "incrementalScalaAnalysis"
const val REPOSITORY_CONTENT_DESCRIPTOR_CONFIGURATION_PREFIX = "repositoryContentDescriptor"
const val JSON_EXT = ".json"
const val JAR_EXT = ".jar"
const val ZIP_EXT = ".zip"
const val OPTIONAL_PREFIX = "optional-"
}
override fun apply(project: Project) {
this.project = project
configurations = project.configurations
extension =
project.extensions.create("nebulaResolutionRules", NebulaResolutionRulesExtension::class.java, project)
val rootProject = project.rootProject
val configuration = project.configurations.maybeCreate(RESOLUTION_RULES_CONFIG_NAME)
if (project != rootProject) {
configuration.isCanBeConsumed = false
val rootProjectDependency = project.dependencies.project(
mapOf("path" to rootProject.path, "configuration" to RESOLUTION_RULES_CONFIG_NAME)
)
configuration.withDependencies { dependencies ->
dependencies.add(rootProjectDependency)
}
}
if (rootProject.extensions.findByType(NebulaResolutionRulesExtension::class.java) == null) {
rootProject.extensions.create(
"nebulaResolutionRules",
NebulaResolutionRulesExtension::class.java,
rootProject
)
}
project.configurations.all { config ->
if (ignoredConfigurationPrefixes.any { config.name.startsWith(it) }) {
return@all
}
if (ignoredConfigurationSuffixes.any { config.name.endsWith(it) }) {
return@all
}
project.onExecute {
val ruleSet = extension.ruleSet()
when {
config.state != Configuration.State.UNRESOLVED || config.getObservedState() != Configuration.State.UNRESOLVED -> Logger.warn(
"Dependency resolution rules will not be applied to $config, it was resolved before the project was executed"
)
else -> {
ruleSet.dependencyRulesPartOne().forEach { rule ->
rule.apply(project, config, config.resolutionStrategy, extension)
}
ruleSet.dependencyRulesPartTwo().forEach { rule ->
rule.apply(project, config, config.resolutionStrategy, extension)
}
}
}
}
}
}
}
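/*
 * Minimal usage sketch for the plugin above (illustrative, not part of this file).
 * Rules are supplied on the 'resolutionRules' configuration, either as a local
 * .json file or as a published .jar/.zip artifact containing .json rule files:
 *
 *   apply plugin: 'nebula.resolution-rules'
 *   dependencies {
 *       resolutionRules files('rules.json')                // local rules file
 *       resolutionRules 'test.rules:resolution-rules:1.+'  // packaged rules artifact
 *   }
 */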
@Suppress("UnstableApiUsage")
abstract class NebulaResolutionRulesService : BuildService<NebulaResolutionRulesService.Params> {
companion object {
private val Logger: Logger = Logging.getLogger(NebulaResolutionRulesService::class.java)
private val Mapper = objectMapper()
fun registerService(project: Project): Provider<NebulaResolutionRulesService> {
return project.gradle.sharedServices.registerIfAbsent(
"nebulaResolutionRules",
NebulaResolutionRulesService::class.java
) { spec ->
val resolutionRules = resolveResolutionRules(project)
spec.parameters.getResolutionRules().set(ResolutionRules(resolutionRules))
}
}
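// Resolves the 'resolutionRules' configuration and parses each .json file directly,
// and each .json entry inside .jar/.zip artifacts, into a RuleSet keyed by rule set
// name; later files override earlier ones on name collision (see putRules below).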
private fun resolveResolutionRules(project: Project): Map<String, RuleSet> {
val configuration = project.configurations.getByName(RESOLUTION_RULES_CONFIG_NAME)
val files = configuration.resolve()
val rules = LinkedHashMap<String, RuleSet>()
for (file in files) {
val filename = file.name
Logger.debug("nebula.resolution-rules uses: $filename")
if (filename.endsWith(ResolutionRulesPlugin.JSON_EXT)) {
rules.putRules(Mapper.parseJsonFile(file))
} else if (filename.endsWith(ResolutionRulesPlugin.JAR_EXT) || filename.endsWith(ResolutionRulesPlugin.ZIP_EXT)) {
Logger.info("nebula.resolution-rules is using ruleset: $filename")
ZipFile(file).use { zip ->
val entries = zip.entries()
while (entries.hasMoreElements()) {
val entry = entries.nextElement()
if (entry.name.endsWith(ResolutionRulesPlugin.JSON_EXT)) {
rules.putRules(Mapper.parseJsonStream(zip, entry))
}
}
}
} else {
Logger.debug("Unsupported rules file extension for $file")
}
}
return rules
}
private fun MutableMap<String, RuleSet>.putRules(ruleSet: RuleSet) {
if (put(ruleSet.name!!, ruleSet) != null) {
Logger.info("Found rules with the same name. Overriding existing ruleset ${ruleSet.name}")
}
}
private fun ruleSetName(filename: String) =
filename.substring(0, filename.lastIndexOf(ResolutionRulesPlugin.JSON_EXT))
private fun ObjectMapper.parseJsonFile(file: File): RuleSet {
val ruleSetName = ruleSetName(file.name)
Logger.debug("Using $ruleSetName (${file.name}) a dependency rules source")
return readValue<RuleSet>(file).withName(ruleSetName)
}
private fun ObjectMapper.parseJsonStream(zip: ZipFile, entry: ZipEntry): RuleSet {
val ruleSetName = ruleSetName(File(entry.name).name)
Logger.debug("Using $ruleSetName (${zip.name}) a dependency rules source")
return readValue<RuleSet>(zip.getInputStream(entry)).withName(ruleSetName)
}
}
interface Params : BuildServiceParameters {
fun getResolutionRules(): Property<ResolutionRules>
}
class ResolutionRules(val byFile: Map<String, RuleSet>) : Serializable
}
open class NebulaResolutionRulesExtension @Inject constructor(private val project: Project) {
var include = ArrayList<String>()
var optional = ArrayList<String>()
var exclude = ArrayList<String>()
fun ruleSet(): RuleSet {
val service = NebulaResolutionRulesService.registerService(project).get()
@Suppress("UnstableApiUsage") val rulesByFile = service.parameters
.getResolutionRules()
.get()
.byFile
return rulesByFile.filterKeys { ruleSet ->
when {
ruleSet.startsWith(ResolutionRulesPlugin.OPTIONAL_PREFIX) -> {
val ruleSetWithoutPrefix = ruleSet.substring(ResolutionRulesPlugin.OPTIONAL_PREFIX.length)
optional.contains(ruleSetWithoutPrefix)
}
include.isNotEmpty() -> include.contains(ruleSet)
else -> !exclude.contains(ruleSet)
}
}.values.flatten()
}
}
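/*
 * Filtering semantics of ruleSet() above (sketch): a rule set whose name starts
 * with "optional-" applies only when its un-prefixed name is listed in 'optional';
 * otherwise a non-empty 'include' acts as an allow-list, and 'exclude' as a
 * deny-list. For example, given rule files "optional-foo.json" and "bar.json"
 * (hypothetical names):
 *
 *   nebulaResolutionRules {
 *       optional = ['foo']   // enables the optional-foo rule set
 *       exclude = ['bar']    // suppresses the bar rule set
 *   }
 */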
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignRulesForceStrictlyWithSubstitutionSpec.groovy<|end_filename|>
/*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nebula.plugin.resolutionrules
import nebula.test.dependencies.DependencyGraphBuilder
import nebula.test.dependencies.GradleDependencyGenerator
import nebula.test.dependencies.ModuleBuilder
import spock.lang.Unroll
class AlignRulesForceStrictlyWithSubstitutionSpec extends AbstractAlignRulesSpec {
def setup() {
setupProjectAndDependencies()
debug = true
}
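// Fixture premise (see setupProjectAndDependencies below): 1.2.0 is the known-bad version
// of test.nebula:a/b/c, each substituted to 1.3.0, while an align rule keeps all
// test.nebula modules on the same version.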
@Unroll
def 'force to good version while substitution is triggered by a transitive dependency'() {
buildFile << """\
dependencies {
implementation('test.nebula:a:1.1.0') {
force = true
}
implementation 'test.nebula:b:1.0.0' // added for alignment
implementation 'test.nebula:c:1.0.0' // added for alignment
implementation 'test.other:z:1.0.0' // brings in bad version
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula', '--warning-mode', 'none']
def results = runTasks(*tasks)
then:
// force to an okay version is the primary contributor; the substitution rule was a secondary contributor
results.output.contains 'test.nebula:a:1.2.0 -> 1.1.0\n'
results.output.contains 'test.nebula:b:1.0.0 -> 1.1.0\n'
results.output.contains 'test.nebula:c:1.0.0 -> 1.1.0\n'
results.output.contains 'aligned'
results.output.contains '- Forced'
results.output.contains "- Selected by rule : substituted test.nebula:a:1.2.0 with test.nebula:a:1.3.0 because '★ custom substitution reason'"
}
@Unroll
def 'force to bad version triggers a substitution'() {
buildFile << """\
dependencies {
implementation('test.nebula:a:1.2.0') {
force = true // force to bad version triggers a substitution
}
implementation 'test.nebula:b:1.0.0' // added for alignment
implementation 'test.nebula:c:1.0.0' // added for alignment
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula', '--warning-mode', 'none']
def results = runTasks(*tasks)
then:
// substitution rule to a known-good-version was the primary contributor; force to a bad version was a secondary contributor
assert results.output.contains('test.nebula:a:1.2.0 -> 1.3.0\n')
assert results.output.contains('test.nebula:b:1.0.0 -> 1.3.0\n')
assert results.output.contains('test.nebula:c:1.0.0 -> 1.3.0\n')
results.output.contains 'aligned'
results.output.contains('- Forced')
results.output.contains "- Selected by rule : substituted test.nebula:a:1.2.0 with test.nebula:a:1.3.0 because '★ custom substitution reason'"
}
@Unroll
def 'force to a good version while substitution is triggered by a direct dependency'() {
buildFile << """\
dependencies {
implementation('test.nebula:a:1.1.0') {
force = true // force to good version
}
implementation 'test.nebula:b:1.0.0' // added for alignment
implementation 'test.nebula:c:1.2.0' // bad version
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula', '--warning-mode', 'none']
def results = runTasks(*tasks)
then:
// force to an okay version is the primary contributor; the substitution rule was a secondary contributor
results.output.contains 'test.nebula:a:1.1.0\n'
results.output.contains 'test.nebula:b:1.0.0 -> 1.1.0\n'
results.output.contains 'test.nebula:c:1.2.0 -> 1.1.0\n'
results.output.contains 'aligned'
results.output.toLowerCase().contains 'forced'
results.output.contains "- Selected by rule : substituted test.nebula:c:1.2.0 with test.nebula:c:1.3.0 because '★ custom substitution reason'"
}
@Unroll
def 'resolution strategy force to good version while substitution is triggered by a transitive dependency'() {
buildFile << """\
configurations.all {
resolutionStrategy {
force 'test.nebula:a:1.1.0'
}
}
dependencies {
implementation 'test.nebula:a:1.1.0'
implementation 'test.nebula:b:1.0.0' // added for alignment
implementation 'test.nebula:c:1.0.0' // added for alignment
implementation 'test.other:z:1.0.0' // brings in bad version
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
assert results.output.contains('Multiple forces on different versions for virtual platform')
assert results.output.contains('test.nebula:a:1.1.0 FAILED')
results.output.contains 'aligned'
results.output.contains '- Forced'
results.output.contains "- Selected by rule : substituted test.nebula:a:1.2.0 with test.nebula:a:1.3.0 because '★ custom substitution reason'"
}
@Unroll
def 'resolution strategy force to bad version triggers a substitution'() {
buildFile << """\
configurations.all {
resolutionStrategy {
force 'test.nebula:a:1.2.0' // force to bad version triggers a substitution
}
}
dependencies {
implementation 'test.nebula:a:1.2.0' // bad version
implementation 'test.nebula:b:1.0.0' // added for alignment
implementation 'test.nebula:c:1.0.0' // added for alignment
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
// substitution rule to a known-good-version was the primary contributor; force to a bad version was a secondary contributor
assert results.output.contains('test.nebula:a:1.2.0 -> 1.3.0\n')
assert results.output.contains('test.nebula:b:1.0.0 -> 1.3.0\n')
assert results.output.contains('test.nebula:c:1.0.0 -> 1.3.0\n')
results.output.contains 'aligned'
results.output.contains('- Forced')
results.output.contains "- Selected by rule : substituted test.nebula:a:1.2.0 with test.nebula:a:1.3.0 because '★ custom substitution reason'"
}
@Unroll
def 'resolution strategy force to a good version while substitution is triggered by a direct dependency'() {
buildFile << """\
configurations.all {
resolutionStrategy {
force 'test.nebula:a:1.1.0'
}
}
dependencies {
implementation 'test.nebula:a:1.1.0'
implementation 'test.nebula:b:1.0.0' // added for alignment
implementation 'test.nebula:c:1.2.0' // bad version
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
// force to an okay version is the primary contributor; the substitution rule was a secondary contributor
results.output.contains 'test.nebula:a:1.1.0\n'
results.output.contains 'test.nebula:b:1.0.0 -> 1.1.0\n'
results.output.contains 'test.nebula:c:1.2.0 -> 1.1.0\n'
results.output.contains 'aligned'
results.output.toLowerCase().contains 'forced'
results.output.contains "- Selected by rule : substituted test.nebula:c:1.2.0 with test.nebula:c:1.3.0 because '★ custom substitution reason'"
}
@Unroll
def 'dependency with strict version declaration to a good version while a substitution is triggered by a transitive dependency'() {
buildFile << """\
dependencies {
implementation('test.nebula:a:1.1.0') {
version { strictly '1.1.0' }
}
implementation 'test.nebula:b:1.0.0' // added for alignment
implementation 'test.nebula:c:1.0.0' // added for alignment
implementation 'test.other:z:1.0.0' // brings in bad version
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
// strictly rich version constraint to an okay version is the primary contributor
assert results.output.contains('test.nebula:a:{strictly 1.1.0} -> 1.1.0\n')
assert results.output.contains('test.nebula:a:1.2.0 -> 1.1.0\n')
assert results.output.contains('test.nebula:b:1.0.0 -> 1.1.0\n')
assert results.output.contains('test.nebula:c:1.0.0 -> 1.1.0\n')
assert results.output.contains('- Forced')
results.output.contains 'aligned'
results.output.contains("- Selected by rule : substituted test.nebula:a:1.2.0 with test.nebula:a:1.3.0 because '★ custom substitution reason'")
}
@Unroll
def 'dependency with strict version declaration to a bad version triggers a substitution'() {
buildFile << """\
dependencies {
implementation('test.nebula:a') {
version { strictly '1.2.0' } // strict to bad version
}
implementation 'test.nebula:b:1.0.0' // added for alignment
implementation 'test.nebula:c:1.0.0' // added for alignment
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
// substitution rule to a known-good-version is the primary contributor; rich version strictly constraint to a bad version is the secondary contributor
results.output.contains 'test.nebula:a:{strictly 1.2.0} -> 1.3.0'
results.output.contains 'test.nebula:b:1.0.0 -> 1.3.0'
results.output.contains 'test.nebula:c:1.0.0 -> 1.3.0'
results.output.contains 'aligned'
results.output.contains "- Selected by rule : substituted test.nebula:a:1.2.0 with test.nebula:a:1.3.0 because '★ custom substitution reason'"
}
@Unroll
def 'dependency with strict version declaration to a good version while substitution is triggered by a direct dependency'() {
buildFile << """\
dependencies {
implementation('test.nebula:a') {
version { strictly '1.1.0' }
}
implementation('test.nebula:b') { // added for alignment
version { strictly '1.1.0' }
}
implementation 'test.nebula:c:1.2.0' // brings in bad version
}
""".stripIndent()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
// rich version strictly declaration to an okay version is the primary contributor; the substitution rule was a secondary contributor
assert results.output.contains('test.nebula:a:{strictly 1.1.0} -> 1.1.0')
assert results.output.contains('test.nebula:b:{strictly 1.1.0} -> 1.1.0')
assert results.output.contains('test.nebula:c:1.2.0 -> 1.1.0')
assert results.output.contains('- Forced')
results.output.contains 'aligned'
results.output.contains("- Selected by rule : substituted test.nebula:c:1.2.0 with test.nebula:c:1.3.0 because '★ custom substitution reason'")
}
@Unroll
def 'dependency constraint with strict version declaration to a good version while a substitution is triggered by a transitive dependency'() {
buildFile << """\
dependencies {
constraints {
implementation('test.nebula:a') {
version { strictly("1.1.0") }
because '☘︎ custom constraint: test.nebula:a should be 1.1.0'
}
}
implementation 'test.other:z:1.0.0' // brings in bad version
implementation 'test.brings-b:b:1.0.0' // added for alignment
implementation 'test.brings-c:c:1.0.0' // added for alignment
}
""".stripIndent()
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.brings-b:b:1.0.0').addDependency('test.nebula:b:1.0.0').build())
.addModule(new ModuleBuilder('test.brings-a:a:1.0.0').addDependency('test.nebula:a:1.0.0').build())
.addModule(new ModuleBuilder('test.brings-c:c:1.0.0').addDependency('test.nebula:c:1.0.0').build())
.build()
new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
// strictly rich version constraint to an okay version is the primary contributor
results.output.contains('test.nebula:a:{strictly 1.1.0} -> 1.1.0\n')
results.output.contains('test.nebula:a:1.2.0 -> 1.1.0\n')
results.output.contains('test.nebula:b:1.0.0 -> 1.1.0\n')
results.output.contains('test.nebula:c:1.0.0 -> 1.1.0\n')
results.output.contains('- Forced')
results.output.contains 'aligned'
results.output.contains("- Selected by rule : substituted test.nebula:a:1.2.0 with test.nebula:a:1.3.0 because '★ custom substitution reason'")
results.output.contains 'By ancestor'
}
@Unroll
def 'dependency constraint with strict version declaration to a bad version triggers a substitution'() {
buildFile << """\
dependencies {
constraints {
implementation('test.nebula:a') {
version { strictly("1.2.0") }
because '☘︎ custom constraint: test.nebula:a should be 1.2.0'
}
}
implementation 'test.brings-a:a:1.0.0' // added for alignment
implementation 'test.brings-b:b:1.0.0' // added for alignment
implementation 'test.brings-c:c:1.0.0' // added for alignment
}
""".stripIndent()
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.brings-b:b:1.0.0').addDependency('test.nebula:b:1.0.0').build())
.addModule(new ModuleBuilder('test.brings-a:a:1.0.0').addDependency('test.nebula:a:1.0.0').build())
.addModule(new ModuleBuilder('test.brings-c:c:1.0.0').addDependency('test.nebula:c:1.0.0').build())
.build()
new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
// substitution rule to a known-good-version is the primary contributor; rich version strictly constraint to a bad version is the secondary contributor
results.output.contains 'test.nebula:a:{strictly 1.2.0} -> 1.3.0'
results.output.contains 'test.nebula:b:1.0.0 -> 1.3.0'
results.output.contains 'test.nebula:c:1.0.0 -> 1.3.0'
results.output.contains 'aligned'
results.output.contains "- Selected by rule : substituted test.nebula:a:1.2.0 with test.nebula:a:1.3.0 because '★ custom substitution reason'"
}
@Unroll
def 'dependency constraint with strict version declaration to a good version while substitution is triggered by a direct dependency'() {
buildFile << """\
dependencies {
constraints {
implementation('test.nebula:a') {
version { strictly("1.1.0") }
because '☘︎ custom constraint: test.nebula:a should be 1.1.0'
}
implementation('test.nebula:b') {
version { strictly("1.1.0") }
because '☘︎ custom constraint: test.nebula:b should be 1.1.0'
}
}
implementation 'test.brings-b:b:1.0.0' // added for alignment
implementation 'test.brings-a:a:1.0.0' // added for alignment
implementation 'test.nebula:c:1.2.0' // brings in bad version
}
""".stripIndent()
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.brings-b:b:1.0.0').addDependency('test.nebula:b:1.0.0').build())
.addModule(new ModuleBuilder('test.brings-a:a:1.0.0').addDependency('test.nebula:a:1.0.0').build())
.build()
new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
when:
def tasks = ['dependencyInsight', '--dependency', 'test.nebula']
def results = runTasks(*tasks)
then:
// rich version strictly declaration to an okay version is the primary contributor; the substitution rule was a secondary contributor
assert results.output.contains('test.nebula:a:{strictly 1.1.0} -> 1.1.0')
assert results.output.contains('test.nebula:b:{strictly 1.1.0} -> 1.1.0')
assert results.output.contains('test.nebula:c:1.2.0 -> 1.1.0')
assert results.output.contains('- Forced')
assert results.output.contains('By ancestor')
results.output.contains 'aligned'
results.output.contains("- Selected by rule : substituted test.nebula:c:1.2.0 with test.nebula:c:1.3.0 because '★ custom substitution reason'")
}
void setupProjectAndDependencies() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:1.1.0')
.addModule('test.nebula:a:1.2.0')
.addModule('test.nebula:a:1.3.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:1.1.0')
.addModule('test.nebula:b:1.2.0')
.addModule('test.nebula:b:1.3.0')
.addModule('test.nebula:c:1.0.0')
.addModule('test.nebula:c:1.1.0')
.addModule('test.nebula:c:1.2.0')
.addModule('test.nebula:c:1.3.0')
.addModule(new ModuleBuilder('test.other:z:1.0.0').addDependency('test.nebula:a:1.2.0').build())
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
String reason = "★ custom substitution reason"
rulesJsonFile << """
{
"substitute": [
{
"module": "test.nebula:a:1.2.0",
"with": "test.nebula:a:1.3.0",
"reason": "$reason",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
},
{
"module": "test.nebula:b:1.2.0",
"with": "test.nebula:b:1.3.0",
"reason": "$reason",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
},
{
"module": "test.nebula:c:1.2.0",
"with": "test.nebula:c:1.3.0",
"reason": "$reason",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
],
"align": [
{
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
""".stripIndent()
buildFile << """
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
""".stripIndent()
}
}
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignAndSubstituteRulesSpec.groovy<|end_filename|>
package nebula.plugin.resolutionrules
import nebula.test.IntegrationTestKitSpec
import nebula.test.dependencies.DependencyGraphBuilder
import nebula.test.dependencies.GradleDependencyGenerator
import nebula.test.dependencies.ModuleBuilder
import nebula.test.dependencies.maven.ArtifactType
import nebula.test.dependencies.maven.Pom
import nebula.test.dependencies.repositories.MavenRepo
import spock.lang.Issue
class AlignAndSubstituteRulesSpec extends IntegrationTestKitSpec {
File rulesJsonFile
File mavenrepo
String reason = "★ custom reason"
String alignRuleForTestNebula = """\
{
"group": "(test.nebula|test.nebula.ext)",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}""".stripIndent()
def setup() {
rulesJsonFile = new File(projectDir, "rules.json")
settingsFile << """\
rootProject.name = 'test-project'
""".stripIndent()
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:0.5.0')
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:1.0.1')
.addModule('test.nebula:a:1.0.2')
.addModule('test.nebula:a:1.0.3')
.addModule('test.nebula:a:1.1.0')
.addModule('test.nebula:b:0.5.0')
.addModule('test.nebula:b:0.6.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:1.0.1')
.addModule('test.nebula:b:1.0.2')
.addModule('test.nebula:b:1.0.3')
.addModule('test.nebula:b:1.1.0')
.addModule('test.nebula:c:1.0.0')
.addModule('test.nebula:c:1.0.1')
.addModule('test.nebula:c:1.0.2')
.addModule('test.nebula:c:1.0.3')
.addModule('test.nebula:c:1.1.0')
.addModule('test.nebula:c:1.4.0')
.addModule('test.beverage:d:1.0.0')
.addModule(new ModuleBuilder('test.other:e:1.0.0').addDependency('test.nebula:b:1.1.0').build())
.addModule(new ModuleBuilder('test.nebula:f:1.0.0').addDependency('test.nebula:a:1.0.0').build())
.addModule(new ModuleBuilder('test.nebula:f:1.0.1').addDependency('test.nebula:a:1.0.1').build())
.addModule(new ModuleBuilder('test.nebula:f:1.0.2').addDependency('test.nebula:a:1.0.2').build())
.addModule(new ModuleBuilder('test.nebula:f:1.0.3').addDependency('test.nebula:a:1.0.3').build())
.addModule(new ModuleBuilder('test.nebula:f:1.1.0').addDependency('test.nebula:a:1.1.0').build())
.addModule('test.nebula:g:1.0.0')
.addModule('test.nebula:g:1.0.1')
.addModule('test.nebula:g:1.0.2')
.addModule('test.nebula:g:1.0.3')
.addModule('test.nebula:g:1.1.0')
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
buildFile << baseBuildGradleFile()
debug = true
keepFiles = true
}
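// Each test below configures a substitution (substituteFromVersion -> substituteToVersion)
// together with the shared align rule above (via helpers defined later in this spec), then
// verifies the aligned resolution result and the "belongs to platform" reporting via
// dependencyInsight output.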
def 'statically defined dependency: sub & align from static version to higher static version'() {
given:
String substituteFromVersion = "1.0.1"
String substituteToVersion = "1.0.3"
String resultingVersion = "1.0.3"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: sub & align from static version to higher latest.release dynamic version'() {
given:
String substituteFromVersion = "1.0.1"
String substituteToVersion = "latest.release"
String resultingVersion = "1.1.0"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: sub & align from static version to higher minor-scoped dynamic version'() {
given:
String substituteFromVersion = "1.0.1"
String substituteToVersion = "1.+"
String resultingVersion = "1.1.0"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: sub & align from static version to lower substitute-to version'() {
given:
String substituteFromVersion = "1.0.1"
String substituteToVersion = "1.0.0"
String resultingVersion = "1.0.0"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: sub & align from range to higher static version'() {
given:
String substituteFromVersion = "[1.0.1,1.0.2]"
String substituteToVersion = "1.0.3"
String resultingVersion = "1.0.3"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: sub & align from range to higher latest.release dynamic version'() {
given:
String substituteFromVersion = "[1.0.1,1.0.2]"
String substituteToVersion = "latest.release"
String resultingVersion = "1.1.0"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: sub & align from range to higher minor-scoped dynamic version'() {
given:
String substituteFromVersion = "[1.0.1,1.0.2]"
String substituteToVersion = "1.+"
String resultingVersion = "1.1.0"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: sub & align from range to higher static version with higher minor version'() {
given:
String substituteFromVersion = "[1.0.1,1.0.2]"
String substituteToVersion = "1.1.0"
String resultingVersion = "1.1.0"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: sub & align from range to lower substitute-to version'() {
given:
String substituteFromVersion = "[1.0.1,1.0.2]"
String substituteToVersion = "1.0.0"
String resultingVersion = "1.0.0"
List<String> dependencyDefinitionVersions = ['1.0.1', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'narrowly defined dynamic dependency: sub & align from static version to higher static version that is not substituted-away-from'() {
given:
List<String> dependencyDefinitionVersions = ['1.0.+', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
substituteFromVersion = "1.0.3"
substituteToVersion = "1.1.0"
resultingVersion = "1.0.3" // FIXME: should resolve differently
}
def 'narrowly defined dynamic dependency: sub & align from static version to higher latest.release dynamic version in narrow definition that is not substituted-away-from'() {
given:
List<String> dependencyDefinitionVersions = ['1.0.+', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
substituteFromVersion = "1.0.3"
substituteToVersion = "latest.release"
resultingVersion = "1.0.3" // FIXME: should resolve differently
}
def 'narrowly defined dynamic dependency: sub & align from static version to higher minor-scoped dynamic version that is not substituted-away-from'() {
given:
List<String> dependencyDefinitionVersions = ['1.0.+', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
substituteFromVersion = "1.0.3"
substituteToVersion = "1.+"
resultingVersion = "1.0.3" // FIXME: should resolve differently
}
def 'narrowly defined dynamic dependency: sub & align from static version to conflict-resolved version that is not substituted-away-from'() {
given:
List<String> dependencyDefinitionVersions = ['1.0.+', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
substituteFromVersion = "1.0.3"
substituteToVersion = "1.0.0"
resultingVersion = "1.0.3" // FIXME: should resolve differently
}
def 'narrowly defined dynamic dependency: sub & align from range to higher static version'() {
given:
List<String> dependencyDefinitionVersions = ['1.0.+', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
substituteFromVersion = "(,1.1.0)"
substituteToVersion = "1.1.0"
resultingVersion = "1.1.0"
}
def 'narrowly defined dynamic dependency: sub & align from range to higher latest.release dynamic version'() {
given:
List<String> dependencyDefinitionVersions = ['1.0.+', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
substituteFromVersion = "(,1.1.0)"
substituteToVersion = "latest.release"
resultingVersion = "1.1.0"
}
def 'narrowly defined dynamic dependency: sub & align from range to higher minor-scoped dynamic version'() {
given:
List<String> dependencyDefinitionVersions = ['1.0.+', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
substituteFromVersion = "(,1.1.0)"
substituteToVersion = "1.+"
resultingVersion = "1.1.0"
}
def 'narrowly defined dynamic dependency: sub & align from range to lower substitute-to version'() {
given:
List<String> dependencyDefinitionVersions = ['1.0.+', '1.0.0']
setupForSimplestSubstitutionAndAlignmentCases(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
substituteFromVersion = "(,1.1.0)"
substituteToVersion = "0.5.0"
resultingVersion = "1.0.3" // only declared dependencies are substituted. v1.0.+ is not a declared dependency
}
def 'missing cases: statically defined dependency: fail to align when lower versions are missing'() {
given:
String definedVersion = "1.0.1"
String substituteFromVersion = "[1.0.0,1.1.0)"
String substituteToVersion = "0.5.0"
String AResultingVersion = "0.5.0"
String BResultingVersion = '0.6.0'
String CResultingVersion = "FAILED"
List<String> dependencyDefinitionVersions = [definedVersion, '0.6.0', definedVersion]
setupForSubstitutionAndAlignmentCasesWithMissingVersions(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", AResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", BResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", CResultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$BResultingVersion")
}
def 'missing cases: statically defined dependency: fail to align when higher versions are missing'() {
given:
String definedVersion = "1.0.1"
String substituteFromVersion = "[1.0.0,1.1.0)"
String substituteToVersion = "1.4.0"
String AResultingVersion = "FAILED"
String BResultingVersion = '0.6.0'
String CResultingVersion = "1.4.0"
List<String> dependencyDefinitionVersions = [definedVersion, '0.6.0', definedVersion]
setupForSubstitutionAndAlignmentCasesWithMissingVersions(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", AResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", BResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", CResultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$CResultingVersion")
}
def 'missing cases: dynamically defined dependency: when dynamic dependency definition and substitutions leave no viable versions'() {
given:
String definedVersion = "1.+"
String substituteFromVersion = "[1.0.0,)"
String substituteToVersion = "0.5.0"
String ABResultingVersion = "1.1.0"
String CResultingVersion = "1.4.0"
List<String> dependencyDefinitionVersions = [definedVersion, '0.6.0', definedVersion]
setupForSubstitutionAndAlignmentCasesWithMissingVersions(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", ABResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", ABResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", CResultingVersion)
def platformVersion = "1.4.0"
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$platformVersion")
}
def 'missing cases: dynamically defined dependency: when dynamic latest.release dependency definition and substitutions leave no viable versions for some lower aligned versions'() {
given:
String definedVersion = "latest.release"
String substituteFromVersion = "[1.0.0,)"
String substituteToVersion = "0.5.0"
String AResultingVersion = "1.1.0"
String BResultingVersion = "1.1.0"
String CResultingVersion = "1.4.0"
List<String> dependencyDefinitionVersions = [definedVersion, '0.6.0', definedVersion]
setupForSubstitutionAndAlignmentCasesWithMissingVersions(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", AResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", BResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", CResultingVersion)
def platformVersion = "1.4.0"
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$platformVersion")
}
def 'missing cases: dynamically defined dependency: when dependency dynamic definition and substitutions leave no viable versions for some higher aligned dependencies'() {
given:
String definedVersion = "1.+"
String substituteFromVersion = "[1.0.0,1.2.0)"
String substituteToVersion = "1.4.0"
String CResultingVersion = "1.4.0"
String ABResultingVersion = "1.1.0"
List<String> dependencyDefinitionVersions = [definedVersion, '0.6.0', definedVersion]
setupForSubstitutionAndAlignmentCasesWithMissingVersions(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", ABResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", ABResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", CResultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$CResultingVersion")
}
def 'missing cases: narrowly defined dynamic dependency: when narrow dynamic dependency definition and substitutions leave no viable versions for some lower aligned dependencies'() {
given:
String definedVersion = "1.0.+"
String substituteFromVersion = "[1.0.0,1.1.0)"
String substituteToVersion = "0.5.0"
String ABCResultingVersion = "1.0.3" // FIXME: should resolve differently
List<String> dependencyDefinitionVersions = [definedVersion, '0.6.0', definedVersion]
setupForSubstitutionAndAlignmentCasesWithMissingVersions(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", ABCResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", ABCResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", ABCResultingVersion)
def platformVersion = "1.0.3"
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$platformVersion")
}
def 'missing cases: narrowly defined dynamic dependency: when narrow dynamic dependency definition and substitutions leave no viable versions for some higher aligned dependencies'() {
given:
String definedVersion = "1.0.+"
String substituteFromVersion = "[1.0.0,1.1.0)"
String substituteToVersion = "1.4.0"
String ABCResultingVersion = "1.0.3" // FIXME: should resolve differently
List<String> dependencyDefinitionVersions = [definedVersion, '0.6.0', definedVersion]
setupForSubstitutionAndAlignmentCasesWithMissingVersions(substituteFromVersion, substituteToVersion, dependencyDefinitionVersions)
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", ABCResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", ABCResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", ABCResultingVersion)
def platformVersion = "1.0.3"
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$platformVersion")
}
def 'substitute static version for other dependency latest.release and align direct deps'() {
given:
def module = "test.beverage:d:1.0.0"
def with = "test.nebula:b:latest.release"
createAlignAndSubstituteRule([(module.toString()): with])
buildFile << """
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.beverage:d:1.0.0'
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
def resultingVersion = "1.1.0"
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'substitute all versions for another dependency with static version and align direct and transitives higher'() {
given:
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.other:h:1.0.0').addDependency('test.nebula:b:1.0.2').build())
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
def module = "test.other:g"
def with = "test.other:h:1.0.0"
createAlignAndSubstituteRule([(module.toString()): with])
buildFile << """
dependencies {
implementation 'test.other:g:1.0.0'
implementation 'test.nebula:a:1.1.0'
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
def resultingVersion = "1.1.0"
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'apply a static version via a force and align results (no substitutions)'() {
given:
rulesJsonFile << """
{
"align": [
$alignRuleForTestNebula
]
}
""".stripIndent()
buildFile << """
dependencies {
implementation 'test.nebula:a:latest.release'
implementation 'test.nebula:b:latest.release'
}
configurations.all {
resolutionStrategy {
force 'test.nebula:a:1.0.2'
}
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
def resultingVersion = "1.0.2"
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'only brought in transitively: sub & align from static version to lower static version that is not substituted-away-from'() {
given:
String resultingVersion = "1.0.1"
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.other:brings-a:1.0.0').addDependency('test.nebula:a:1.0.2').build())
.addModule(new ModuleBuilder('test.other:also-brings-a:1.0.0').addDependency('test.nebula:a:1.0.3').build())
.addModule(new ModuleBuilder('test.other:brings-b:1.0.0').addDependency('test.nebula:b:1.0.3').build())
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
def substituteFromVersion = "[1.0.2,1.1.0]"
def substituteToVersion = "1.0.1"
Map<String, String> modulesAndSubstitutions = new HashMap<>()
modulesAndSubstitutions.put("test.nebula:a:$substituteFromVersion".toString(), "test.nebula:a:$substituteToVersion".toString())
modulesAndSubstitutions.put("test.nebula:b:$substituteFromVersion".toString(), "test.nebula:b:$substituteToVersion".toString())
createAlignAndSubstituteRule(modulesAndSubstitutions)
buildFile << """
dependencies {
implementation 'test.other:brings-a:latest.release'
implementation 'test.other:also-brings-a:latest.release'
implementation 'test.other:brings-b:latest.release'
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'only brought in transitively: core alignment fails with matching static substitution and force: #description'() {
given:
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.other:brings-a:1.0.0').addDependency('test.nebula:a:1.0.3').build())
.addModule(new ModuleBuilder('test.other:also-brings-a:1.0.0').addDependency('test.nebula:a:1.1.0').build())
.addModule(new ModuleBuilder('test.other:brings-b:1.0.0').addDependency('test.nebula:b:1.1.0').build())
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
def substituteFromVersion = "1.0.3"
def substituteToVersion = "1.1.0"
Map<String, String> modulesAndSubstitutions = new HashMap<>()
modulesAndSubstitutions.put("test.nebula:a:$substituteFromVersion".toString(), "test.nebula:a:$substituteToVersion".toString())
modulesAndSubstitutions.put("test.nebula:b:$substituteFromVersion".toString(), "test.nebula:b:$substituteToVersion".toString())
createAlignAndSubstituteRule(modulesAndSubstitutions)
def forceConfig = ''
if (useForce) {
forceConfig = """
force 'test.nebula:a:$forcedVersion'
force 'test.nebula:b:$forcedVersion'
""".stripIndent()
}
buildFile << """
dependencies {
implementation 'test.other:brings-a:latest.release'
implementation 'test.other:also-brings-a:latest.release'
implementation 'test.other:brings-b:latest.release'
}
configurations.all {
resolutionStrategy { $forceConfig }
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
if (resultingVersion != 'FAILED') {
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
where:
useForce | forcedVersion | resultingVersion | description
false | null | '1.1.0' | 'without a force'
// TODO: possibly use require-reject in lieu of resolutionStrategy.dependencySubstitution to fix this case
true | '1.0.2' | 'FAILED' | 'forced to a static version'
true | 'latest.release' | 'FAILED' | 'forced to latest.release'
}
def 'only brought in transitively: substitute with a range and align with a force'() {
given:
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.other:brings-a:1.0.0').addDependency('test.nebula:a:1.0.2').build())
.addModule(new ModuleBuilder('test.other:also-brings-a:1.0.0').addDependency('test.nebula:a:1.0.3').build())
.addModule(new ModuleBuilder('test.other:brings-b:1.0.0').addDependency('test.nebula:b:1.0.3').build())
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
def substituteFromVersion = "[1.0.2,1.1.0]"
def substituteToVersion = "1.0.1"
Map<String, String> modulesAndSubstitutions = new HashMap<>()
modulesAndSubstitutions.put("test.nebula:a:$substituteFromVersion".toString(), "test.nebula:a:$substituteToVersion".toString())
modulesAndSubstitutions.put("test.nebula:b:$substituteFromVersion".toString(), "test.nebula:b:$substituteToVersion".toString())
createAlignAndSubstituteRule(modulesAndSubstitutions)
String resultingVersion = "1.0.1"
buildFile << """
dependencies {
implementation 'test.other:brings-a:latest.release'
implementation 'test.other:also-brings-a:latest.release'
implementation 'test.other:brings-b:latest.release'
}
configurations.all {
resolutionStrategy {
force 'test.nebula:a:latest.release'
force 'test.nebula:b:latest.release'
}
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
@Issue("Based on https://github.com/nebula-plugins/gradle-nebula-integration/issues/11")
def 'apply a static version via details.useVersion for 1 direct dep and align results'() {
given:
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:c:0.5.0')
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << """
{
"align": [
$alignRuleForTestNebula
]
}
""".stripIndent()
buildFile << """
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b:0.5.0'
implementation 'test.nebula:c:1.0.0'
}
configurations.all {
resolutionStrategy.eachDependency { details ->
if (details.requested.name == 'a') {
details.useVersion '0.5.0'
details.because('$reason')
}
}
}
""".stripIndent()
String resultingVersion = "1.0.0"
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion) // alignment wins over the details.useVersion via `By conflict resolution : between versions 1.0.0 and 0.5.0`
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
@Issue("Based on https://github.com/nebula-plugins/gradle-nebula-integration/issues/11")
def 'apply a static version via details.useVersion for each dependency and align results'() {
given:
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.other:brings-a:1.0.0').addDependency('test.nebula:a:1.0.2').build())
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << """
{
"align": [
$alignRuleForTestNebula
]
}
""".stripIndent()
buildFile << """
dependencies {
implementation 'test.other:brings-a:1.0.0'
implementation 'test.nebula:b:1.0.0'
implementation 'test.nebula:c:1.0.3'
}
configurations.all {
resolutionStrategy.eachDependency { details ->
if (details.requested.group == 'test.nebula') {
details.useVersion '1.0.1'
details.because('$reason')
}
}
}
""".stripIndent()
String resultingVersion = "1.0.1"
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
@Issue("Based on https://github.com/nebula-plugins/gradle-nebula-integration/issues/11")
def 'apply a static version via details.useVersion for 1 direct dep and align results with conflict resolution involved'() {
given:
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:c:0.5.0')
.addModule(new ModuleBuilder('test.other:brings-a:1.0.0').addDependency('test.nebula:a:1.0.2').build())
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << """
{
"align": [
$alignRuleForTestNebula
]
}
""".stripIndent()
buildFile << """
dependencies {
implementation 'test.other:brings-a:1.0.0'
implementation 'test.nebula:b:0.5.0'
implementation 'test.nebula:c:1.0.0'
}
configurations.all {
resolutionStrategy.eachDependency { details ->
if (details.requested.name == 'c') {
details.useVersion '0.5.0'
details.because('$reason')
}
}
}
""".stripIndent()
String resultingVersion = "1.0.2"
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
@Issue("Based on https://github.com/nebula-plugins/gradle-nebula-integration/issues/11")
def 'apply a static version via details.useVersion for 1 direct dep and align results without conflict resolution involved'() {
given:
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:c:0.5.0')
.addModule(new ModuleBuilder('test.other:brings-a:1.0.0').addDependency('test.nebula:a:1.0.0').build())
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << """
{
"align": [
$alignRuleForTestNebula
]
}
""".stripIndent()
buildFile << """
dependencies {
implementation 'test.other:brings-a:1.0.0'
implementation 'test.nebula:b:0.5.0'
implementation 'test.nebula:c:1.0.0'
}
configurations.all {
resolutionStrategy.eachDependency { details ->
if (details.requested.name == 'c') {
details.useVersion '0.5.0'
details.because('$reason')
}
}
}
""".stripIndent()
String resultingVersion = "1.0.0"
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", resultingVersion) // alignment wins over the details.useVersion
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'statically defined dependency: substituting all versions higher than x and aligning'() {
given:
String definedVersion = "4.2.0"
String resultingVersion = '4.1.0'
// based on https://github.com/nebula-plugins/gradle-nebula-integration/issues/50
setupForGuiceAndLibraryDependency(definedVersion)
when:
def result = runTasks(*tasks(false, 'com.google.inject'))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "com.google.inject.extensions:guice-assistedinject", resultingVersion)
dependencyInsightContains(result.output, "com.google.inject.extensions:guice-grapher", resultingVersion)
dependencyInsightContains(result.output, "com.google.inject.extensions:guice-multibindings", resultingVersion)
dependencyInsightContains(result.output, "com.google.inject:guice", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-com.google.inject:$resultingVersion")
}
def 'dynamically defined dependency: substituting all versions higher than x and aligning'() {
given:
// based on https://github.com/nebula-plugins/gradle-nebula-integration/issues/50
// Also, substitutions apply on declared dependencies, not resolved ones
String definedVersion = "4.+"
String resultingVersionForDepsOtherThanCoreGuice = '4.1.0'
setupForGuiceAndLibraryDependency(definedVersion)
when:
def result = runTasks(*tasks(false, 'com.google.inject'))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "com.google.inject.extensions:guice-assistedinject", resultingVersionForDepsOtherThanCoreGuice)
dependencyInsightContains(result.output, "com.google.inject.extensions:guice-grapher", resultingVersionForDepsOtherThanCoreGuice)
dependencyInsightContains(result.output, "com.google.inject.extensions:guice-multibindings", resultingVersionForDepsOtherThanCoreGuice)
// since guice is an external dependency whose latest version keeps incrementing, just verify that core guice does not resolve to the same version as the extensions
def content = "com.google.inject:guice:.*$resultingVersionForDepsOtherThanCoreGuice\n"
assert result.output.findAll(content).size() == 0
// just make sure there's a value here for dependencyInsight
dependencyInsightContains(result.output, "com.google.inject:guice", '')
def alignedPlatformPartialVersion = "4."
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-com.google.inject:$alignedPlatformPartialVersion")
}
def 'transitive dependencies are aligned'() {
given:
def substituteFromVersion = "[1.0.1,1.1.0)"
def substituteToVersion = "1.0.0"
String resultingVersion = "1.0.0"
Map<String, String> modulesAndSubstitutions = new HashMap<>()
modulesAndSubstitutions.put("test.nebula:a:$substituteFromVersion".toString(), "test.nebula:a:$substituteToVersion".toString())
modulesAndSubstitutions.put("test.nebula:f:$substituteFromVersion".toString(), "test.nebula:f:$substituteToVersion".toString())
createAlignAndSubstituteRule(modulesAndSubstitutions)
buildFile << """
dependencies {
implementation 'test.nebula:f:1.0.3'
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:f", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'alignment rule excludes are honored'() {
given:
def module = "[1.0.1,1.0.3)"
def with = "1.0.3"
rulesJsonFile << """
{
"substitute": [
{
"module" : "test.nebula:b:$module",
"with" : "test.nebula:b:$with",
"reason" : "$reason",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
}
],
"align": [
{
"group": "(test.nebula|test.nebula.ext)",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"includes": [],
"excludes": ["(c|g)"],
"date": "2016-03-17T20:21:20.368Z"
}
]
}
""".stripIndent()
buildFile << """
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b:1.0.1'
implementation 'test.nebula:c:1.0.2'
implementation 'test.nebula:g:1.0.1'
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
def alignedResultingVersion = "1.0.3"
dependencyInsightContains(result.output, "test.nebula:a", alignedResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", alignedResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", '1.0.2')
dependencyInsightContains(result.output, "test.nebula:g", '1.0.1')
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$alignedResultingVersion")
}
def 'alignment rule includes are honored'() {
given:
def module = "[1.0.1,1.0.3)"
def with = "1.0.3"
rulesJsonFile << """
{
"substitute": [
{
"module" : "test.nebula:b:$module",
"with" : "test.nebula:b:$with",
"reason" : "$reason",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
}
],
"align": [
{
"group": "(test.nebula|test.nebula.ext)",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"includes": ["(a|b)"],
"excludes": [],
"date": "2016-03-17T20:21:20.368Z"
}
]
}
""".stripIndent()
buildFile << """
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b:1.0.1'
implementation 'test.nebula:c:1.0.2'
implementation 'test.nebula:g:1.0.1'
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
def alignedResultingVersion = "1.0.3"
dependencyInsightContains(result.output, "test.nebula:a", alignedResultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", alignedResultingVersion)
dependencyInsightContains(result.output, "test.nebula:c", '1.0.2')
dependencyInsightContains(result.output, "test.nebula:g", '1.0.1')
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$alignedResultingVersion")
}
def 'recs with core bom support disabled: sub & align from bom version to lower static version'() {
given:
String bomVersion = "1.0.2"
String substituteToVersion = "1.0.1"
String resultingVersion = "1.0.2" // FIXME: should resolve differently
Boolean coreBomSupport = false
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'recs with core bom support disabled: sub & align from bom version to higher static version'() {
given:
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.3"
resultingVersion = "1.0.2" // FIXME: should resolve differently
coreBomSupport = false
}
def 'recs with core bom support disabled: sub & align from bom version to higher minor-scoped dynamic version'() {
given:
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.+"
resultingVersion = "1.0.2" // FIXME: should resolve differently
coreBomSupport = false
}
def 'recs with core bom support disabled: sub & align from bom version to higher patch-scoped dynamic version'() {
given:
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.3"
resultingVersion = "1.0.2" // FIXME: should resolve differently
coreBomSupport = false
}
def 'recs with core bom support disabled: do not substitute when resulting version is not in substitute-away-from range'() {
given:
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.0"
substituteToVersion = "1.0.1"
resultingVersion = "1.0.0"
coreBomSupport = false
}
def 'recs with core bom support enabled: sub & align from bom version to lower static version'() {
given:
String bomVersion = "1.0.2"
String substituteToVersion = "1.0.1"
String resultingVersion = "1.0.1"
Boolean coreBomSupport = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
}
def 'recs with core bom support enabled: sub & align from bom version to higher static version'() {
given:
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.3"
resultingVersion = "1.0.3"
coreBomSupport = true
}
def 'recs with core bom support enabled: sub & align from bom version to higher minor-scoped dynamic version'() {
given:
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.+"
resultingVersion = "1.1.0"
coreBomSupport = true
}
def 'recs with core bom support enabled: sub & align from bom version to higher patch-scoped dynamic version'() {
given:
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.3"
resultingVersion = "1.0.3"
coreBomSupport = true
}
def 'recs with core bom support enabled: do not substitute when resulting version is not in substitute-away-from range'() {
given:
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.0"
substituteToVersion = "1.0.1"
resultingVersion = "1.0.0"
coreBomSupport = true
}
def 'enforced recs with core bom support disabled: sub & align from bom version to lower static version'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*tasks(coreBomSupport))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.1"
resultingVersion = "1.0.2" // FIXME: should resolve differently
coreBomSupport = false
}
def 'enforced recs with core bom support disabled: sub & align from bom version to higher static version'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.3"
resultingVersion = "1.0.2" // FIXME: should resolve differently
coreBomSupport = false
}
def 'enforced recs with core bom support disabled: sub & align from bom version to higher minor-scoped dynamic version'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.+"
resultingVersion = "1.0.2" // FIXME: should resolve differently
coreBomSupport = false
}
def 'enforced recs with core bom support disabled: sub & align from bom version to higher patch-scoped dynamic version'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.3"
resultingVersion = "1.0.2" // FIXME: should resolve differently
coreBomSupport = false
}
def 'enforced recs with core bom support disabled: do not substitute when resulting version is not in substitute-away-from range'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*tasks(coreBomSupport))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.0"
substituteToVersion = "1.0.1"
resultingVersion = "1.0.0"
coreBomSupport = false
}
def 'enforced recs with core bom support enabled: sub & align from bom version to lower static version'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*tasks(coreBomSupport))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.1"
resultingVersion = "1.0.1"
coreBomSupport = true
}
def 'enforced recs with core bom support enabled: sub & align from bom version to higher static version'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.3"
resultingVersion = "1.0.3"
coreBomSupport = true
}
def 'enforced recs with core bom support enabled: sub & align from bom version to higher minor-scoped dynamic version'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.+"
resultingVersion = "1.1.0"
coreBomSupport = true
}
def 'enforced recs with core bom support enabled: sub & align from bom version to higher patch-scoped dynamic version'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*(tasks(coreBomSupport)))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.2"
substituteToVersion = "1.0.3"
resultingVersion = "1.0.3"
coreBomSupport = true
}
def 'enforced recs with core bom support enabled: do not substitute when resulting version is not in substitute-away-from range'() {
given:
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, substituteToVersion, usingEnforcedPlatform)
when:
def result = runTasks(*tasks(coreBomSupport))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
bomVersion = "1.0.0"
substituteToVersion = "1.0.1"
resultingVersion = "1.0.0"
coreBomSupport = true
}
def 'multiple substitutions applied: direct static dependency: honor multiple substitutions'() {
given:
createMultipleSubstitutionRules()
buildFile << """
dependencies {
implementation 'test.nebula:a:1.0.1'
implementation 'test.nebula:b:1.0.3'
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("substituted test.nebula:a:1.0.1 with test.nebula:a:1.0.2")
assert result.output.contains("substituted test.nebula:b:1.0.3 with test.nebula:b:1.0.2")
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
resultingVersion = "1.0.2"
}
def 'multiple substitutions applied: only brought in transitively: honor multiple substitutions'() {
given:
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.other:brings-a:1.0.0').addDependency('test.nebula:a:1.0.1').build())
.addModule(new ModuleBuilder('test.other:also-brings-a:1.0.0').addDependency('test.nebula:a:1.0.3').build())
.addModule(new ModuleBuilder('test.other:brings-b:1.0.0').addDependency('test.nebula:b:1.0.3').build())
.build()
mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
createMultipleSubstitutionRules()
buildFile << """
dependencies {
implementation 'test.other:brings-a:latest.release'
implementation 'test.other:brings-b:latest.release'
}
""".stripIndent()
when:
def result = runTasks(*tasks())
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("substituted test.nebula:a:1.0.1 with test.nebula:a:1.0.2")
assert result.output.contains("substituted test.nebula:b:1.0.3 with test.nebula:b:1.0.2")
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
resultingVersion = "1.0.2"
}
def 'multiple substitutions applied: recs with core bom support disabled: honor multiple substitutions'() {
given:
def bomVersion = "1.0.1"
setupForBomAndAlignmentAndSubstitution(bomVersion, "")
rulesJsonFile.delete()
rulesJsonFile.createNewFile()
createMultipleSubstitutionRules()
when:
def result = runTasks(*tasks(coreBomSupport))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("Recommending version 1.0.1 for dependency test.nebula:a via conflict resolution recommendation")
assert result.output.contains("Recommending version 1.0.1 for dependency test.nebula:b via conflict resolution recommendation")
assert result.output.contains("substituted test.nebula:a:1.0.1 with test.nebula:a:1.0.2")
assert result.output.contains("substituted test.nebula:b:1.0.1 with test.nebula:b:1.0.2")
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
resultingVersion = "1.0.1"
coreBomSupport = false
}
def 'multiple substitutions applied: recs with core bom support enabled: honor multiple substitutions'() {
given:
def bomVersion = "1.0.1"
setupForBomAndAlignmentAndSubstitution(bomVersion, "")
rulesJsonFile.delete()
rulesJsonFile.createNewFile()
createMultipleSubstitutionRules()
when:
def result = runTasks(*tasks(coreBomSupport))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("substituted test.nebula:a:1.0.1 with test.nebula:a:1.0.2")
assert result.output.contains("substituted test.nebula:b:1.0.1 with test.nebula:b:1.0.2")
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
resultingVersion = "1.0.2"
coreBomSupport = true
}
def 'multiple substitutions applied: enforced recs with core bom support disabled: honor multiple substitutions'() {
given:
def bomVersion = "1.0.1"
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, "", usingEnforcedPlatform)
rulesJsonFile.delete()
rulesJsonFile.createNewFile()
createMultipleSubstitutionRules()
when:
def result = runTasks(*tasks(coreBomSupport))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("substituted test.nebula:a:1.0.1 with test.nebula:a:1.0.2")
assert result.output.contains("substituted test.nebula:b:1.0.1 with test.nebula:b:1.0.2")
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
resultingVersion = "1.0.1"
coreBomSupport = false
}
def 'multiple substitutions applied: enforced recs with core bom support enabled: honor multiple substitutions'() {
given:
def bomVersion = "1.0.1"
def usingEnforcedPlatform = true
setupForBomAndAlignmentAndSubstitution(bomVersion, "", usingEnforcedPlatform)
rulesJsonFile.delete()
rulesJsonFile.createNewFile()
createMultipleSubstitutionRules()
when:
def result = runTasks(*tasks(coreBomSupport))
then:
writeOutputToProjectDir(result.output)
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
assert result.output.contains("substituted test.nebula:a:1.0.1 with test.nebula:a:1.0.2")
assert result.output.contains("substituted test.nebula:b:1.0.1 with test.nebula:b:1.0.2")
assert result.output.contains("belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:$resultingVersion")
where:
resultingVersion = "1.0.2"
coreBomSupport = true
}
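// Writes a rules file with chained substitutions for test.nebula:a and :b
// (1.0.1 -> 1.0.2, 1.1.0 -> 1.0.3, and 1.0.3 -> 1.0.2), so both low and high
// versions funnel into 1.0.2, alongside the shared test.nebula alignment rule.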
private File createMultipleSubstitutionRules() {
rulesJsonFile << """
{
"substitute": [
{
"module" : "test.nebula:a:1.0.1",
"with" : "test.nebula:a:1.0.2",
"reason" : "1.0.1 is too small",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
},
{
"module" : "test.nebula:b:1.0.1",
"with" : "test.nebula:b:1.0.2",
"reason" : "1.0.1 is too small",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
},
{
"module" : "test.nebula:a:1.1.0",
"with" : "test.nebula:a:1.0.3",
"reason" : "1.1.0 is too large",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
},
{
"module" : "test.nebula:b:1.1.0",
"with" : "test.nebula:b:1.0.3",
"reason" : "1.1.0 is too large",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
},
{
"module" : "test.nebula:a:1.0.3",
"with" : "test.nebula:a:1.0.2",
"reason" : "1.0.3 is also too large",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
},
{
"module" : "test.nebula:b:1.0.3",
"with" : "test.nebula:b:1.0.2",
"reason" : "1.0.3 is also too large",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
}
],
"align": [
$alignRuleForTestNebula
]
}
""".stripIndent()
}
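// Renders the common build.gradle scaffolding: the java and nebula.resolution-rules
// plugins (plus an optional extra plugin), the generated maven repo, and the
// resolutionRules wiring for the rules JSON file.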
private String baseBuildGradleFile(String additionalPlugin = '') {
def pluginToAdd = ''
if (additionalPlugin != '') {
pluginToAdd = "\n\tid $additionalPlugin"
}
"""
plugins {
id 'java'
id 'nebula.resolution-rules'$pluginToAdd
}
repositories {
maven { url '${projectDir.toPath().relativize(mavenrepo.toPath()).toFile()}' }
}
dependencies {
resolutionRules files("${projectDir.toPath().relativize(rulesJsonFile.toPath()).toFile()}")
}
""".stripIndent()
}
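// Creates matching substitutions for test.nebula:a and :b from the same version (or range)
// to the same target, writes the alignment rule, and declares the two dependencies
// under test with the given versions.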
private def setupForSimplestSubstitutionAndAlignmentCases(String substituteFromVersion, String substituteToVersion, List<String> definedVersions) {
Map<String, String> modulesAndSubstitutions = new HashMap<>()
modulesAndSubstitutions.put("test.nebula:a:$substituteFromVersion".toString(), "test.nebula:a:$substituteToVersion".toString())
modulesAndSubstitutions.put("test.nebula:b:$substituteFromVersion".toString(), "test.nebula:b:$substituteToVersion".toString())
assert definedVersions.size() == 2
createAlignAndSubstituteRule(modulesAndSubstitutions)
buildFile << """
dependencies {
implementation 'test.nebula:a:${definedVersions[0]}'
implementation 'test.nebula:b:${definedVersions[1]}'
}
""".stripIndent()
}
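// Variant of the simplest setup used by the 'missing cases' specs: substitutions cover
// test.nebula:a, :b, and :c, and three dependency versions are declared.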
private def setupForSubstitutionAndAlignmentCasesWithMissingVersions(String substituteFromVersion, String substituteToVersion, List<String> definedVersions) {
Map<String, String> modulesAndSubstitutions = new HashMap<>()
modulesAndSubstitutions.put("test.nebula:a:$substituteFromVersion".toString(), "test.nebula:a:$substituteToVersion".toString())
modulesAndSubstitutions.put("test.nebula:b:$substituteFromVersion".toString(), "test.nebula:b:$substituteToVersion".toString())
modulesAndSubstitutions.put("test.nebula:c:$substituteFromVersion".toString(), "test.nebula:c:$substituteToVersion".toString())
assert definedVersions.size() == 3
createAlignAndSubstituteRule(modulesAndSubstitutions)
buildFile << """
dependencies {
implementation 'test.nebula:a:${definedVersions[0]}'
implementation 'test.nebula:b:${definedVersions[1]}'
implementation 'test.nebula:c:${definedVersions[2]}'
}
""".stripIndent()
}
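// Builds a BOM recommending bomVersion for test.nebula:a and :b, applies
// nebula.dependency-recommender (optionally with the BOM enforced), and layers a
// substitution from 1.0.2 to substituteToVersion on top of the alignment rule.
// The dependencies are declared without versions so the BOM supplies them.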
private def setupForBomAndAlignmentAndSubstitution(String bomVersion, String substituteToVersion, boolean usingEnforcedPlatform = false) {
def substituteFromVersion = "1.0.2"
Map<String, String> modulesAndSubstitutions = new HashMap<>()
modulesAndSubstitutions.put("test.nebula:a:$substituteFromVersion".toString(), "test.nebula:a:$substituteToVersion".toString())
modulesAndSubstitutions.put("test.nebula:b:$substituteFromVersion".toString(), "test.nebula:b:$substituteToVersion".toString())
createAlignAndSubstituteRule(modulesAndSubstitutions)
def bomRepo = createBom(["test.nebula:a:$bomVersion", "test.nebula:b:$bomVersion"])
buildFile.text = ""
buildFile << """
buildscript {
repositories { mavenCentral() }
}
""".stripIndent()
buildFile << baseBuildGradleFile("'nebula.dependency-recommender' version '9.0.1'")
if (!usingEnforcedPlatform) {
buildFile << """
dependencyRecommendations {
mavenBom module: 'test.nebula.bom:testbom:latest.release'
}
""".stripIndent()
} else {
buildFile << """
dependencyRecommendations {
mavenBom module: 'test.nebula.bom:testbom:latest.release', enforced: true
}
""".stripIndent()
}
buildFile << """
dependencies {
implementation 'test.nebula:a'
implementation 'test.nebula:b'
}
repositories {
maven { url '${bomRepo.root.absoluteFile.toURI()}' }
}
""".stripIndent()
}
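// Guice scenario from gradle-nebula-integration issue #50: align the com.google.inject
// group while substituting guice and its extensions at [4.2.0,) down to 4.1.0, with a
// local test.nebula:a:1.0 module pulling in guice-grapher 4.1.0 transitively.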
def setupForGuiceAndLibraryDependency(String definedVersion) {
rulesJsonFile << """
{
"align": [
{
"group": "com.google.inject",
"reason": "Align guice",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
],
"substitute": [
{
"module" : "com.google.inject:guice:[4.2.0,)",
"with" : "com.google.inject:guice:4.1.0",
"reason" : "$reason",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
},
{
"module": "com.google.inject.extensions:guice-assistedinject:[4.2.0,)",
"with": "com.google.inject.extensions:guice-assistedinject:4.1.0",
"reason": "$reason",
"author": "Test user <<EMAIL>>",
"date": "2015-10-07T20:21:20.368Z"
},
{
"module": "com.google.inject.extensions:guice-grapher:[4.2.0,)",
"with": "com.google.inject.extensions:guice-grapher:4.1.0",
"reason": "$reason",
"author": "Test user <<EMAIL>>",
"date": "2015-10-07T20:21:20.368Z"
},
{
"module": "com.google.inject.extensions:guice-multibindings:[4.2.0,)",
"with": "com.google.inject.extensions:guice-multibindings:4.1.0",
"reason": "$reason",
"author": "Test user <<EMAIL>>",
"date": "2015-10-07T20:21:20.368Z"
}
]
}
""".stripIndent()
buildFile << """
repositories {
mavenCentral()
maven {
url 'repo'
}
}
dependencies {
//at the time of writing resolves to 4.2.2
implementation "com.google.inject:guice:$definedVersion"
implementation "test.nebula:a:1.0"
}
"""
MavenRepo repo = new MavenRepo()
repo.root = new File(projectDir, 'repo')
Pom pom = new Pom('test.nebula', 'a', '1.0', ArtifactType.POM)
pom.addDependency('com.google.inject.extensions', 'guice-grapher', '4.1.0')
repo.poms.add(pom)
repo.generate()
}
private def createBom(List<String> dependencies) {
MavenRepo repo = new MavenRepo()
repo.root = new File(projectDir, 'build/bomrepo')
Pom pom = new Pom('test.nebula.bom', 'testbom', '1.0.0', ArtifactType.POM)
dependencies.each { dependency ->
def depParts = dependency.split(':')
assert depParts.size() == 3
pom.addManagementDependency(depParts[0], depParts[1], depParts[2])
}
repo.poms.add(pom)
repo.generate()
return repo
}
private def createAlignAndSubstituteRule(String module, String with) {
rulesJsonFile << """
{
"substitute": [
{
"module" : "$module",
"with" : "$with",
"reason" : "$reason",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
}
],
"align": [
$alignRuleForTestNebula
]
}
""".stripIndent()
}
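// Builds the same rule-file shape as the single-module overload above, with one "substitute" entry per map pair:
// { "substitute": [ {...}, {...} ], "align": [ $alignRuleForTestNebula ] }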
private def createAlignAndSubstituteRule(Map<String, String> modulesAndSubstitutions) {
rulesJsonFile << """
{
"substitute": [
""".stripIndent()
List<String> substitutions = new ArrayList<>()
modulesAndSubstitutions.each { module, with ->
substitutions.add("""
{
"module" : "$module",
"with" : "$with",
"reason" : "$reason",
"author" : "Test user <<EMAIL>>",
"date" : "2015-10-07T20:21:20.368Z"
}""".stripIndent())
}
rulesJsonFile << substitutions.join(',')
rulesJsonFile << """
],
"align": [
$alignRuleForTestNebula
]
}
""".stripIndent()
}
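// Builds the dependencyInsight invocation with the nebula feature flags under test:
// core alignment is always enabled here, while core BOM support is toggled per test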
private static def tasks(Boolean usingCoreBomSupport = false, String groupForInsight = 'test.nebula') {
return [
'dependencyInsight',
'--dependency',
groupForInsight,
"-Dnebula.features.coreAlignmentSupport=true",
"-Dnebula.features.coreBomSupport=$usingCoreBomSupport"
]
}
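// Asserts that the dependencyInsight output has a line matching "<group:name>:...<resultingVersion>", e.g. "test.nebula:a:1.0.0 -> 1.1.0"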
private static void dependencyInsightContains(String resultOutput, String groupAndName, String resultingVersion) {
def content = "$groupAndName:.*$resultingVersion\n"
assert resultOutput.findAll(content).size() >= 1
}
private void writeOutputToProjectDir(String output) {
def file = new File(projectDir, "result.txt")
file.createNewFile()
file << output
}
}
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignRulesBasicWithCoreSpec.groovy<|end_filename|>
package nebula.plugin.resolutionrules
import nebula.test.IntegrationTestKitSpec
import nebula.test.dependencies.DependencyGraphBuilder
import nebula.test.dependencies.GradleDependencyGenerator
import nebula.test.dependencies.ModuleBuilder
import org.gradle.api.logging.LogLevel
import org.gradle.util.GradleVersion
import spock.lang.Unroll
class AlignRulesBasicWithCoreSpec extends IntegrationTestKitSpec {
private def rulesJsonFile
def setup() {
debug = true
keepFiles = true
if (GradleVersion.current().baseVersion < GradleVersion.version("6.0")) {
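// Gradle Module Metadata became the default in Gradle 6.0; earlier versions need the feature preview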
settingsFile << '''\
enableFeaturePreview("GRADLE_METADATA")
'''.stripIndent()
}
rulesJsonFile = new File(projectDir, "rules.json")
rulesJsonFile.createNewFile()
buildFile << """\
plugins {
id 'nebula.resolution-rules'
id 'java'
}
dependencies {
resolutionRules files('$rulesJsonFile')
}
""".stripIndent()
settingsFile << """\
rootProject.name = '${moduleName}'
""".stripIndent()
logLevel = LogLevel.INFO
}
def 'align rules and force to latest.release'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:1.0.1')
.addModule('test.nebula:a:1.1.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:1.0.1')
.addModule('test.nebula:b:1.1.0')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
rulesJsonFile << alignTestNebulaRule()
buildFile << """\
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b:1.1.0'
}
configurations.all {
resolutionStrategy {
force 'test.nebula:a:latest.release'
}
}
""".stripIndent()
when:
def result = runTasks('dependencyInsight', '--dependency', 'test.nebula')
then:
def resultingVersion = "1.1.0"
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
result.output.contains 'belongs to platform aligned-platform:rules-0-for-test.nebula-or-test.nebula.ext:1.1.0'
}
def 'align rules and force to latest.release when brought in transitively'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:1.0.1')
.addModule('test.nebula:a:1.1.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:1.0.1')
.addModule('test.nebula:b:1.1.0')
.addModule(new ModuleBuilder('test.other:brings-a:1.0.0').addDependency('test.nebula:a:1.0.3').build())
.addModule(new ModuleBuilder('test.other:also-brings-a:1.0.0').addDependency('test.nebula:a:1.1.0').build())
.addModule(new ModuleBuilder('test.other:brings-b:1.0.0').addDependency('test.nebula:b:1.1.0').build())
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
rulesJsonFile << alignTestNebulaRule()
buildFile << """\
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencies {
implementation 'test.other:brings-a:latest.release'
implementation 'test.other:also-brings-a:latest.release'
implementation 'test.other:brings-b:latest.release'
}
configurations.all {
resolutionStrategy {
force 'test.nebula:a:latest.release'
}
}
""".stripIndent()
when:
def result = runTasks('dependencyInsight', '--dependency', 'test.nebula')
then:
def resultingVersion = "1.1.0"
dependencyInsightContains(result.output, "test.nebula:a", resultingVersion)
dependencyInsightContains(result.output, "test.nebula:b", resultingVersion)
}
def 'multiple align rules'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:1.1.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:1.1.0')
.addModule('test.other:c:0.12.2')
.addModule('test.other:c:1.0.0')
.addModule('test.other:d:0.12.2')
.addModule('test.other:d:1.0.0')
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
},
{
"name": "testOther",
"group": "test.other",
"reason": "Aligning test",
"author": "<NAME> <<EMAIL>>",
"date": "2016-04-05T19:19:49.495Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b:1.1.0'
implementation 'test.other:c:1.0.0'
implementation 'test.other:d:0.12.+'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains 'test.nebula:a:1.0.0 -> 1.1.0\n'
result.output.contains 'test.nebula:b:1.1.0\n'
result.output.contains 'test.other:c:1.0.0\n'
result.output.contains 'test.other:d:0.12.+ -> 1.0.0\n'
}
@Unroll
def 'core alignment uses versions observed during resolution'() {
// test case from https://github.com/nebula-plugins/gradle-nebula-integration/issues/52
// higher version transitive aligning parent dependency
given:
rulesJsonFile << """
{
"align": [
{
"name": "exampleapp-client-align",
"group": "test.nebula",
"includes": [ "exampleapp-.*" ],
"excludes": [],
"reason": "Library all together",
"author": "<EMAIL>",
"date": "2018-03-01"
}
],
"deny": [],
"exclude": [],
"reject": [],
"replace": [],
"substitute": []
}
""".stripIndent()
def mavenrepo = createDependenciesForExampleAppDependencies()
buildFile << """
repositories {
${mavenrepo.mavenRepositoryBlock}
}
dependencies {
implementation 'test.nebula:exampleapp-client:80.0.139'
}
""".stripIndent()
when:
def dependenciesResult = runTasks('dependencies')
def result = runTasks(*tasks())
then:
dependencyInsightContains(result.output, "test.nebula:exampleapp-client", resultingVersion)
assert dependenciesResult.output.contains("""
\\--- test.nebula:exampleapp-client:80.0.139 -> 80.0.225
+--- test.nebula:exampleapp-common:80.0.249
\\--- test.nebula:exampleapp-smart-client:80.0.10
""".stripIndent())
where:
resultingVersion << ["80.0.225"]
}
private static def tasks(Boolean usingCoreBomSupport = false, String groupForInsight = 'test.nebula') {
return [
'dependencyInsight',
'--dependency',
groupForInsight,
"-Dnebula.features.coreBomSupport=$usingCoreBomSupport"
]
}
private static void dependencyInsightContains(String resultOutput, String groupAndName, String resultingVersion) {
def content = "$groupAndName:.*$resultingVersion\n"
assert resultOutput.findAll(content).size() >= 1
}
private static String alignTestNebulaRule() {
return '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "(test.nebula|test.nebula.ext)",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
}
private GradleDependencyGenerator createDependenciesForExampleAppDependencies() {
def client = 'test.nebula:exampleapp-client'
def common = 'test.nebula:exampleapp-common'
def model = 'test.nebula:exampleapp-model'
def smartClient = 'test.nebula:exampleapp-smart-client'
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder("$client:80.0.139")
.addDependency("$common:80.0.154")
.build())
.addModule(new ModuleBuilder("$client:80.0.154")
.addDependency("$common:80.0.177")
.build())
.addModule(new ModuleBuilder("$client:80.0.177")
.addDependency("$common:80.0.201")
.build())
.addModule(new ModuleBuilder("$client:80.0.201")
.addDependency("$common:80.0.225")
.build())
.addModule(new ModuleBuilder("$client:80.0.225")
.addDependency("$common:80.0.249")
.addDependency("$smartClient:80.0.10")
.build())
.addModule(new ModuleBuilder("$client:80.0.236")
.addDependency("$common:80.0.260")
.addDependency("$smartClient:80.0.21")
.build())
.addModule("$common:80.0.154")
.addModule("$common:80.0.177")
.addModule("$common:80.0.201")
.addModule("$common:80.0.225")
.addModule("$common:80.0.249")
.addModule("$common:80.0.260")
.addModule("$model:80.0.15")
.addModule("$smartClient:80.0.10")
.addModule(new ModuleBuilder("$smartClient:80.0.21")
.addDependency("$model:80.0.15")
.build())
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen")
mavenrepo.generateTestMavenRepo()
return mavenrepo
}
}
<|start_filename|>src/main/kotlin/nebula/plugin/resolutionrules/alignRule.kt<|end_filename|>
package nebula.plugin.resolutionrules
import org.gradle.api.Project
import org.gradle.api.artifacts.*
import org.gradle.api.artifacts.ModuleVersionIdentifier
import org.gradle.api.internal.ReusableAction
import org.gradle.api.logging.Logger
import org.gradle.api.logging.Logging
import java.io.Serializable
import java.util.concurrent.ConcurrentHashMap
import java.util.regex.Matcher
import java.util.regex.Pattern
import javax.inject.Inject
data class AlignRule(val name: String?,
val group: Regex,
val includes: List<Regex> = emptyList(),
val excludes: List<Regex> = emptyList(),
val match: String?,
override var ruleSet: String?,
override val reason: String,
override val author: String,
override val date: String,
var belongsToName: String?) : BasicRule, Serializable {
private val groupPattern = group.toPattern()
private val includesPatterns = includes.map { it.toPattern() }
private val excludesPatterns = excludes.map { it.toPattern() }
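// java.util.regex.Matcher is stateful and not thread-safe, so each thread gets its own matcher set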
private val alignMatchers = ConcurrentHashMap<Thread, AlignMatcher>()
override fun apply(project: Project,
configuration: Configuration,
resolutionStrategy: ResolutionStrategy,
extension: NebulaResolutionRulesExtension) {
//TODO this rule is applied repeatedly for each configuration. Ideally it should be taken out and
//applied only once per project
if (configuration.name == "compileClasspath") { // This is one way to ensure it'll be run for only one configuration
project.dependencies.components.all(AlignedPlatformMetadataRule::class.java) {
it.params(this)
}
}
}
fun ruleMatches(dep: ModuleVersionIdentifier) = ruleMatches(dep.group, dep.name)
fun ruleMatches(group: String, name: String) = alignMatchers.computeIfAbsent(Thread.currentThread()) {
AlignMatcher(this, groupPattern, includesPatterns, excludesPatterns)
}.matches(group, name)
}
class AlignMatcher(val rule: AlignRule, groupPattern: Pattern, includesPatterns: List<Pattern>, excludesPatterns: List<Pattern>) {
private val groupMatcher = groupPattern.matcher("")
private val includeMatchers = includesPatterns.map { it.matcher("") }
private val excludeMatchers = excludesPatterns.map { it.matcher("") }
private fun Matcher.matches(input: String, type: String): Boolean {
reset(input)
return try {
matches()
} catch (e: Exception) {
throw IllegalArgumentException("Failed to use matcher '$this' from type '$type' to match '$input'\n" +
"Rule: $rule", e)
}
}
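// An empty includes list matches every module name; an empty excludes list excludes nothing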
fun matches(group: String, name: String): Boolean {
return groupMatcher.matches(group, "group") &&
(includeMatchers.isEmpty() || includeMatchers.any { it.matches(name, "includes") }) &&
(excludeMatchers.isEmpty() || excludeMatchers.none { it.matches(name, "excludes") })
}
}
@CacheableRule
open class AlignedPlatformMetadataRule @Inject constructor(val rule: AlignRule) : ComponentMetadataRule, Serializable, ReusableAction {
private val logger: Logger = Logging.getLogger(AlignedPlatformMetadataRule::class.java)
override fun execute(componentMetadataContext: ComponentMetadataContext?) {
modifyDetails(componentMetadataContext!!.details)
}
fun modifyDetails(details: ComponentMetadataDetails) {
if (rule.ruleMatches(details.id)) {
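// Declaring membership in the same virtual platform lets Gradle's core alignment resolve all members to a single version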
details.belongsTo("aligned-platform:${rule.belongsToName}:${details.id.version}")
logger.debug("Aligning platform based on '${details.id.group}:${details.id.name}:${details.id.version}' from align rule with group '${rule.group}'")
}
}
}
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignRulesMultiprojectSpec.groovy<|end_filename|>
/*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package nebula.plugin.resolutionrules
import nebula.test.IntegrationSpec
import nebula.test.dependencies.DependencyGraphBuilder
import nebula.test.dependencies.GradleDependencyGenerator
import spock.lang.Unroll
class AlignRulesMultiprojectSpec extends IntegrationSpec {
def rulesJsonFile
def aDir
def bDir
def setup() {
// Avoid deprecation warnings during parallel resolution while we look for a solution
System.setProperty('ignoreDeprecations', 'true')
System.setProperty('ignoreMutableProjectStateWarnings', 'true')
fork = false
rulesJsonFile = new File(projectDir, "${moduleName}.json")
buildFile << """\
allprojects {
${applyPlugin(ResolutionRulesPlugin)}
group = 'test.nebula'
}
project(':a') {
apply plugin: 'java'
}
project(':b') {
apply plugin: 'java-library'
}
dependencies {
resolutionRules files('$rulesJsonFile')
}
""".stripIndent()
settingsFile << '''\
rootProject.name = 'aligntest'
'''.stripIndent()
aDir = addSubproject('a')
bDir = addSubproject('b')
}
@Unroll
def 'align rules do not interfere with a multiproject that produces the jars being aligned (parallel #parallel)'() {
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"includes": ["a", "b"],
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
// project b depends on a
new File(bDir, 'build.gradle') << '''\
dependencies {
implementation project(':a')
}
'''.stripIndent()
buildFile << '''\
subprojects {
apply plugin: 'maven-publish'
publishing {
publications {
test(MavenPublication) {
from components.java
}
}
repositories {
maven {
name 'repo'
url 'build/repo'
}
}
}
}
'''.stripIndent()
when:
def tasks = [':b:dependencies', '--configuration', 'compileClasspath']
if (parallel) {
tasks += "--parallel"
}
def results = runTasksSuccessfully(*tasks)
then:
results.standardOutput.contains('\\--- project :a\n')
where:
parallel << [false, true]
}
@Unroll
def 'cycle like behavior (parallel #parallel)'() {
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
new File(aDir, 'build.gradle') << '''\
dependencies {
testImplementation project(':b')
}
'''.stripIndent()
new File(bDir, 'build.gradle') << '''\
dependencies {
implementation project(':a')
}
'''.stripIndent()
when:
def tasks = [':a:dependencies', ':b:dependencies', 'assemble']
if (parallel) {
tasks += "--parallel"
}
runTasksSuccessfully(*tasks)
then:
noExceptionThrown()
where:
parallel << [true, false]
}
@Unroll
def 'can align project dependencies (parallel #parallel)'() {
def graph = new DependencyGraphBuilder()
.addModule('other.nebula:a:0.42.0')
.addModule('other.nebula:a:1.0.0')
.addModule('other.nebula:a:1.1.0')
.addModule('other.nebula:b:0.42.0')
.addModule('other.nebula:b:1.0.0')
.addModule('other.nebula:b:1.1.0')
.addModule('other.nebula:c:0.42.0')
.addModule('other.nebula:c:1.0.0')
.addModule('other.nebula:c:1.1.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"group": "other.nebula",
"includes": [ "a", "b" ],
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
subprojects {
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
}
project(':a') {
dependencies {
implementation project(':b')
}
}
project(':b') {
dependencies {
api 'other.nebula:a:1.0.0'
api 'other.nebula:b:1.1.0'
api 'other.nebula:c:0.42.0'
}
}
""".stripIndent()
when:
def tasks = [':a:dependencies', '--configuration', 'compileClasspath']
if (parallel) {
tasks += "--parallel"
}
def result = runTasksSuccessfully(*tasks)
then:
result.standardOutput.contains '+--- other.nebula:a:1.0.0 -> 1.1.0'
result.standardOutput.contains '+--- other.nebula:b:1.1.0'
result.standardOutput.contains '\\--- other.nebula:c:0.42.0'
where:
parallel << [true, false]
}
@Unroll
def 'root project can depend on subprojects (parallel #parallel)'() {
def graph = new DependencyGraphBuilder()
.addModule('other.nebula:a:0.42.0')
.addModule('other.nebula:a:1.0.0')
.addModule('other.nebula:a:1.1.0')
.addModule('other.nebula:b:0.42.0')
.addModule('other.nebula:b:1.0.0')
.addModule('other.nebula:b:1.1.0')
.addModule('other.nebula:c:0.42.0')
.addModule('other.nebula:c:1.0.0')
.addModule('other.nebula:c:1.1.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"group": "other.nebula",
"includes": [ "a", "b" ],
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
apply plugin: 'java'
subprojects {
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
}
dependencies {
implementation project(':a')
implementation project(':b')
}
project(':a') {
dependencies {
implementation project(':b')
}
}
project(':b') {
dependencies {
api 'other.nebula:a:1.0.0'
api 'other.nebula:b:1.1.0'
api 'other.nebula:c:0.42.0'
}
}
""".stripIndent()
when:
def tasks = [':a:dependencies', '--configuration', 'compileClasspath']
if (parallel) {
tasks += "--parallel"
}
def result = runTasksSuccessfully(*tasks)
then:
result.standardOutput.contains '+--- other.nebula:a:1.0.0 -> 1.1.0'
result.standardOutput.contains '+--- other.nebula:b:1.1.0'
result.standardOutput.contains '\\--- other.nebula:c:0.42.0'
where:
parallel << [true, false]
}
}
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignAndLockWithDowngradedTransitiveDependenciesSpec.groovy<|end_filename|>
package nebula.plugin.resolutionrules
import nebula.test.IntegrationTestKitSpec
import nebula.test.dependencies.DependencyGraphBuilder
import nebula.test.dependencies.GradleDependencyGenerator
import nebula.test.dependencies.ModuleBuilder
import spock.lang.Ignore
import spock.lang.Unroll
class AlignAndLockWithDowngradedTransitiveDependenciesSpec extends IntegrationTestKitSpec {
def rulesJsonFile
static def STATIC_MAJOR_MINOR_PATCH_2_9_9 = "2.9.9"
static def STATIC_MAJOR_MINOR_PATCH_MICRO_PATCH_2_9_9_3 = "2.9.9.3"
static def DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS = "2.9.+"
static def DYNAMIC_RANGE = "[2.9.9,2.10.0)"
def setup() {
rulesJsonFile = new File(projectDir, "${moduleName}.json")
buildFile << """\
buildscript {
repositories {
maven {
url "https://plugins.gradle.org/m2/"
}
}
dependencies {
classpath "com.netflix.nebula:gradle-dependency-lock-plugin:11.+"
}
}
plugins {
id 'nebula.resolution-rules'
id 'java'
}
apply plugin: 'nebula.dependency-lock'
dependencies {
resolutionRules files('$rulesJsonFile')
}
""".stripIndent()
keepFiles = true
debug = true
rulesJsonFile << jacksonAlignmentAndSubstitutionRule()
}
@Unroll
def 'use downgraded version via a static major.minor.patch force on a transitive dependency | core locking #coreLocking'() {
given:
setupDependenciesAndAdjustBuildFile()
buildFile << """
configurations.all {
resolutionStrategy {
force 'com.fasterxml.jackson.core:jackson-core:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
}
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.9')
dependenciesAreAligned(afterLockingResults.output, '2.9.9')
micropatchVersionIsNotUsed(results.output, afterLockingResults.output, '2.9.9')
where:
coreLocking << [false, true]
}
@Unroll
def 'use downgraded version via a dynamic major.minor.+ force on a transitive dependency | core locking #coreLocking'() {
given:
setupDependenciesAndAdjustBuildFile()
buildFile << """
configurations.all {
resolutionStrategy {
force 'com.fasterxml.jackson.core:jackson-core:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
}
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.10')
dependenciesAreAligned(afterLockingResults.output, '2.9.10')
micropatchVersionIsNotUsed(results.output, afterLockingResults.output, '2.9.10')
where:
coreLocking << [false, true]
}
@Unroll
def 'use downgraded version via a static major.minor.patch.micropatch forces on a transitive dependency| core locking #coreLocking'() {
given:
setupDependenciesAndAdjustBuildFile()
buildFile << """
configurations.all {
resolutionStrategy {
force 'com.fasterxml.jackson.core:jackson-databind:$STATIC_MAJOR_MINOR_PATCH_MICRO_PATCH_2_9_9_3'
}
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.9')
dependenciesAreAligned(afterLockingResults.output, '2.9.9')
micropatchVersionIsUsed(results.output, afterLockingResults.output, '2.9.9') // hurray!
where:
coreLocking << [false, true]
}
@Unroll
def 'use downgraded version via a static major.minor.patch strict constraint on a transitive dependency| core locking #coreLocking'() {
setupDependenciesAndAdjustBuildFile()
buildFile << """
dependencies {
implementation('com.fasterxml.jackson.core:jackson-core') {
version { strictly '$STATIC_MAJOR_MINOR_PATCH_2_9_9' } // add constraint
}
// add dependencies at the constraint version to the dependency graph
implementation 'com.fasterxml.jackson.core:jackson-annotations:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
implementation 'com.fasterxml.jackson.core:jackson-core:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
implementation 'com.fasterxml.jackson.core:jackson-databind:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-smile:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
implementation 'com.fasterxml.jackson.datatype:jackson-datatype-guava:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
implementation 'com.fasterxml.jackson.datatype:jackson-datatype-joda:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
implementation 'com.fasterxml.jackson.module:jackson-module-jaxb-annotations:$STATIC_MAJOR_MINOR_PATCH_2_9_9'
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.9')
dependenciesAreAligned(afterLockingResults.output, '2.9.9')
micropatchVersionIsNotUsed(results.output, afterLockingResults.output, '2.9.9')
where:
coreLocking << [false, true]
}
@Unroll
def 'use downgraded version via a dynamic major.minor.+ strict constraint on a transitive dependency| core locking #coreLocking'() {
setupDependenciesAndAdjustBuildFile()
buildFile << """
dependencies {
implementation('com.fasterxml.jackson.core:jackson-core') {
version {
strictly '$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS' // add constraint
}
}
// add dependencies at the constraint version to the dependency graph
implementation 'com.fasterxml.jackson.core:jackson-annotations:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
implementation 'com.fasterxml.jackson.core:jackson-core:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
implementation 'com.fasterxml.jackson.core:jackson-databind:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-smile:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
implementation 'com.fasterxml.jackson.datatype:jackson-datatype-guava:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
implementation 'com.fasterxml.jackson.datatype:jackson-datatype-joda:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
implementation 'com.fasterxml.jackson.module:jackson-module-jaxb-annotations:$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS'
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.10')
dependenciesAreAligned(afterLockingResults.output, '2.9.10')
micropatchVersionIsNotUsed(results.output, afterLockingResults.output, '2.9.10')
where:
coreLocking << [false, true]
}
@Unroll
def 'use downgraded version via matching forces with static major.minor.patch version| core locking #coreLocking'() {
given:
setupDependenciesAndAdjustBuildFile()
buildFile << """
configurations.all {
resolutionStrategy {
eachDependency { DependencyResolveDetails details ->
if (details.requested.group.startsWith('com.fasterxml.jackson')) {
details.useVersion "$STATIC_MAJOR_MINOR_PATCH_2_9_9"
}
}
}
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.9')
dependenciesAreAligned(afterLockingResults.output, '2.9.9')
micropatchVersionIsUsed(results.output, afterLockingResults.output, '2.9.9') // hurray!
where:
coreLocking << [false, true]
}
@Unroll
def 'use downgraded version via matching forces with dynamic major.minor.+ version| core locking #coreLocking'() {
given:
setupDependenciesAndAdjustBuildFile()
buildFile << """
configurations.all {
resolutionStrategy {
eachDependency { DependencyResolveDetails details ->
if (details.requested.group.startsWith('com.fasterxml.jackson')) {
details.useVersion "$DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS"
}
}
}
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.10')
dependenciesAreAligned(afterLockingResults.output, '2.9.10')
micropatchVersionIsUsed(results.output, afterLockingResults.output, '2.9.10') // hurray!
where:
coreLocking << [false, true]
}
@Unroll
def 'use downgraded version via virtual platform constraint with static major.minor.patch version | core locking #coreLocking'() {
// note: platform constraints like this are only possible with core Gradle alignment
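// the constrained coordinate is the virtual platform name that the align rule publishes via belongsTo()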
given:
setupDependenciesAndAdjustBuildFile()
buildFile << """
dependencies {
constraints {
implementation("aligned-platform:${moduleName}-0-for-com.fasterxml.jackson.core-or-dataformat-or-datatype-or-jaxrs-or-jr-or-module") {
version { strictly("$STATIC_MAJOR_MINOR_PATCH_2_9_9") }
because("this version is required for compatibility")
}
}
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.9')
dependenciesAreAligned(afterLockingResults.output, '2.9.9')
micropatchVersionIsNotUsed(results.output, afterLockingResults.output, '2.9.9')
where:
coreLocking | _
false | _
true | _
}
@Unroll
@Ignore("This does not end up with aligned dependencies. This is raised to Gradle")
def 'use downgraded version via virtual platform constraint with static major.minor.patch.micropatch version | core locking #coreLocking'() {
// note: platform constraints like this are only possible with core Gradle alignment
given:
setupDependenciesAndAdjustBuildFile()
buildFile << """
dependencies {
constraints {
implementation("aligned-platform:${moduleName}-0-for-com.fasterxml.jackson.core-or-dataformat-or-datatype-or-jaxrs-or-jr-or-module") {
version { strictly("$STATIC_MAJOR_MINOR_PATCH_MICRO_PATCH_2_9_9_3") }
because("this version is required for compatibility")
}
}
}
""".stripIndent()
when:
def results = runTasks(*insightTasks(coreLocking))
runTasks(*lockingTasks(coreLocking))
def afterLockingResults = runTasks(*insightTasks(coreLocking))
then:
dependenciesAreAligned(results.output, '2.9.9')
dependenciesAreAligned(afterLockingResults.output, '2.9.9')
micropatchVersionIsUsed(results.output, afterLockingResults.output, '2.9.9')
where:
coreLocking | _
false | _
true | _
}
@Unroll
def 'use downgraded version via virtual platform constraint with dynamic version #version'() {
// note: platform constraints like this are only possible with core Gradle alignment
// this test verifies the current non-working behavior so that we can track when it changes
setupDependenciesAndAdjustBuildFile()
buildFile << """
dependencies {
constraints {
implementation("aligned-platform:${moduleName}-0-for-com.fasterxml.jackson.core-or-dataformat-or-datatype-or-jaxrs-or-jr-or-module") {
version { strictly("$version") }
because("this version is required for compatibility")
}
}
}
""".stripIndent()
when:
def results = runTasksAndFail('dependencyInsight', '--dependency', 'com.fasterxml.jackson', "--singlepath")
then:
results.output.contains('> fromIndex = -1')
where:
version | _
DYNAMIC_MAJOR_MINOR_PLUS_2_9_PLUS | _
DYNAMIC_RANGE | _
}
private static def insightTasks(boolean coreLocking) {
['dependencies', '--configuration', 'compileClasspath', *flags(coreLocking)]
}
private static def lockingTasks(boolean coreLocking) {
if (coreLocking) {
return ['dependencies', '--write-locks', '--configuration', 'compileClasspath', *flags(coreLocking)]
}
return ['generateLock', 'saveLock', *flags(coreLocking)]
}
private static def flags(boolean coreLocking) {
return ["-Dnebula.features.coreLockingSupport=${coreLocking}"]
}
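// Publishes test.nebula:apricot:1.0.0, whose Jackson transitives default to 2.10.5, so each test can exercise a downgrade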
private void setupDependenciesAndAdjustBuildFile(String version = "2.10.5") {
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.nebula:apricot:1.0.0')
.addDependency("com.fasterxml.jackson.core:jackson-annotations:$version")
.addDependency("com.fasterxml.jackson.core:jackson-core:$version")
.addDependency("com.fasterxml.jackson.core:jackson-databind:$version")
.addDependency("com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:$version")
.addDependency("com.fasterxml.jackson.dataformat:jackson-dataformat-smile:$version")
.addDependency("com.fasterxml.jackson.datatype:jackson-datatype-guava:$version")
.addDependency("com.fasterxml.jackson.datatype:jackson-datatype-joda:$version")
.addDependency("com.fasterxml.jackson.module:jackson-module-jaxb-annotations:$version")
.build())
.build()
def mavenrepo = new GradleDependencyGenerator(graph, "$projectDir/testrepogen").generateTestMavenRepo()
buildFile << """
repositories {
maven { url '${mavenrepo.absolutePath}' }
mavenCentral()
}
dependencies {
implementation 'test.nebula:apricot:1.0.0'
}
""".stripIndent()
}
private static String jacksonAlignmentAndSubstitutionRule() {
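// Quadruple backslashes: Groovy collapses them to "\\." in the JSON text, which the JSON parser reads as the regex "\." (a literal dot)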
'''\
{
"align": [
{
"group": "com\\\\.fasterxml\\\\.jackson\\\\.(core|dataformat|datatype|jaxrs|jr|module)",
"excludes": [
"jackson-datatype-jdk7",
"jackson-module-scala_2.12.0-RC1",
"jackson-module-scala_2.12.0-M5",
"jackson-module-scala_2.12.0-M4",
"jackson-module-scala_2.9.3",
"jackson-module-scala_2.9.2",
"jackson-module-scala_2.9.1",
"jackson-module-swagger",
"jackson-module-scala",
"jackson-datatype-hibernate",
"jackson-dataformat-ion"
],
"includes": [],
"reason": "Align all Jackson libraries",
"match": "^(\\\\d+\\\\.)?(\\\\d+\\\\.)?(\\\\*|\\\\d+)?(\\\\.pr\\\\d+)?",
"author": "author",
"date": "2016-05-19"
}
],
"replace": [],
"substitute": [
{
"module": "com.fasterxml.jackson.core:jackson-databind:[2.9.9,2.9.9.3)",
"with": "com.fasterxml.jackson.core:jackson-databind:2.9.9.3",
"reason": "There is a vulnerability, see...",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
],
"deny": [],
"exclude": [],
"reject": []
}
'''.stripIndent()
}
private static void dependenciesAreAligned(String output, String alignedVersion) {
assert output.findAll("com.fasterxml.jackson.core:jackson-annotations:.*$alignedVersion\n").size() > 0
assert output.findAll("com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:.*$alignedVersion\n").size() > 0
assert output.findAll("com.fasterxml.jackson.dataformat:jackson-dataformat-smile:.*$alignedVersion\n").size() > 0
assert output.findAll("com.fasterxml.jackson.datatype:jackson-datatype-guava:.*$alignedVersion\n").size() > 0
assert output.findAll("com.fasterxml.jackson.datatype:jackson-datatype-joda:.*$alignedVersion\n").size() > 0
assert output.findAll("com.fasterxml.jackson.module:jackson-module-jaxb-annotations:.*$alignedVersion\n").size() > 0
assert output.findAll("com.fasterxml.jackson.core:jackson-databind:.*$alignedVersion\n").size() > 0 ||
output.findAll("com.fasterxml.jackson.core:jackson-databind:.*$alignedVersion.[0-9]+\n").size() > 0
}
private static void micropatchVersionIsUsed(String output1, String output2, String alignedVersion) {
assert output1.findAll("com.fasterxml.jackson.core:jackson-databind:.*$alignedVersion.[0-9]+\n").size() > 0
assert output2.findAll("com.fasterxml.jackson.core:jackson-databind:.*$alignedVersion.[0-9]+\n").size() > 0
}
private static void micropatchVersionIsNotUsed(String output1, String output2, String alignedVersion) {
assert output1.findAll("com.fasterxml.jackson.core:jackson-databind:.*$alignedVersion.[0-9]+\n").size() == 0
assert output2.findAll("com.fasterxml.jackson.core:jackson-databind:.*$alignedVersion.[0-9]+\n").size() == 0
assert output1.findAll("com.fasterxml.jackson.core:jackson-databind:.*$alignedVersion\n").size() > 0
assert output2.findAll("com.fasterxml.jackson.core:jackson-databind:.*$alignedVersion\n").size() > 0
}
}
<|start_filename|>src/main/kotlin/nebula/plugin/resolutionrules/configurations.kt<|end_filename|>
package nebula.plugin.resolutionrules
import org.gradle.api.artifacts.Configuration
import org.gradle.api.internal.artifacts.configurations.ConfigurationInternal
import java.lang.reflect.Field
import java.lang.reflect.Modifier
/**
* Various reflection hackiness follows due to deficiencies in the Gradle configuration APIs:
*
* - We can't add the configuration to the configuration container to get the addAction handlers, because it causes ConcurrentModificationExceptions
* - We can't set the configuration name on copyRecursive, which makes for confusing logging output when we're resolving our configurations
*/
fun Any.setField(name: String, value: Any) {
val field = javaClass.findDeclaredField(name)
field.isAccessible = true
lateinit var modifiersField: Field
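// On JDK 12+ the 'modifiers' field is filtered from Field's reflection data, so fall back to the internal getDeclaredFields0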
try {
modifiersField = Field::class.java.getDeclaredField("modifiers")
} catch (e: NoSuchFieldException) {
try {
val getDeclaredFields0 = Class::class.java.getDeclaredMethod("getDeclaredFields0", Boolean::class.javaPrimitiveType)
val accessibleBeforeSet: Boolean = getDeclaredFields0.isAccessible
getDeclaredFields0.isAccessible = true
@Suppress("UNCHECKED_CAST") val declaredFields = getDeclaredFields0.invoke(Field::class.java, false) as Array<Field>
getDeclaredFields0.isAccessible = accessibleBeforeSet
for (declaredField in declaredFields) {
if ("modifiers" == declaredField.name) {
modifiersField = declaredField
break
}
}
} catch (ex: Exception) {
e.addSuppressed(ex)
throw e
}
}
modifiersField.isAccessible = true
modifiersField.setInt(field, field.modifiers and Modifier.FINAL.inv())
field.set(this, value)
}
tailrec fun <T> Class<T>.findDeclaredField(name: String): Field {
val field = declaredFields.singleOrNull { it.name == name }
if (field != null) {
return field
} else if (superclass != null) {
return superclass.findDeclaredField(name)
}
throw IllegalArgumentException("Could not find field $name")
}
fun Configuration.getObservedState(): Configuration.State {
val f: Field = this::class.java.findDeclaredField("observedState")
f.isAccessible = true
val resolvedState = f.get(this) as ConfigurationInternal.InternalState
// Whether resolution contained an error is not tracked here
return if (resolvedState == ConfigurationInternal.InternalState.ARTIFACTS_RESOLVED || resolvedState == ConfigurationInternal.InternalState.GRAPH_RESOLVED) {
Configuration.State.RESOLVED
} else {
Configuration.State.UNRESOLVED
}
}
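// Illustrative use of setField (a sketch; assumes a Gradle internal configuration whose final 'name' field
// causes the confusing copyRecursive logging noted above):
// val copy = configuration.copyRecursive()
// copy.setField("name", "${configuration.name}Copy")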
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignAndSubstituteRulesWithSpringBoot2xPluginWithoutManagedDepsSpec.groovy<|end_filename|>
package nebula.plugin.resolutionrules
import spock.lang.Unroll
class AlignAndSubstituteRulesWithSpringBoot2xPluginWithoutManagedDepsSpec extends AbstractRulesWithSpringBootPluginSpec {
File rulesJsonFile
def setup() {
rulesJsonFile = new File(projectDir, "rules.json")
debug = true
keepFiles = true
}
@Unroll
def 'direct dep | with lower requested version'() {
given:
// in Spring Boot 2.x plugin, the `io.spring.dependency-management` plugin is added for dependency management. We are not including it here.
setupForDirectDependencyScenario(extSpringBootVersion, forcedVersion, '',
"\n\tspringVersion = \"$extSpringVersion\"")
buildFile << """
dependencies {
implementation "org.springframework:spring-core$requestedVersion"
implementation "org.springframework.boot:spring-boot-starter:$extSpringBootVersion"
implementation "org.springframework.boot:spring-boot-starter-web:$extSpringBootVersion"
}
""".stripIndent()
when:
def result = runTasks(*tasks())
def output = result.output
then:
writeOutputToProjectDir(output)
dependencyInsightContains(output, 'org.springframework:spring-aop', managedSpringVersion)
dependencyInsightContains(output, 'org.springframework:spring-beans', managedSpringVersion)
dependencyInsightContains(output, 'org.springframework:spring-expression', managedSpringVersion)
dependencyInsightContains(output, 'org.springframework:spring-core', managedSpringVersion)
where:
extSpringVersion = '4.2.4.RELEASE'
extSpringBootVersion = '2.1.4.RELEASE'
managedSpringVersion = '5.1.6.RELEASE' // from https://repo1.maven.org/maven2/org/springframework/boot/spring-boot-dependencies/2.1.4.RELEASE/spring-boot-dependencies-2.1.4.RELEASE.pom
requestedVersion = ':\${springVersion}'
forcedVersion = ''
}
@Unroll
def 'direct dep | with higher requested version'() {
given:
// in Spring Boot 2.x plugin, the `io.spring.dependency-management` plugin is added for dependency management. We are not including it here.
setupForDirectDependencyScenario(extSpringBootVersion, forcedVersion, '',
"\n\tspringVersion = \"$extSpringVersion\"")
buildFile << """
dependencies {
implementation "org.springframework:spring-core$requestedVersion"
implementation "org.springframework.boot:spring-boot-starter:$extSpringBootVersion"
implementation "org.springframework.boot:spring-boot-starter-web:$extSpringBootVersion"
}
""".stripIndent()
when:
def result = runTasks(*tasks())
def output = result.output
then:
writeOutputToProjectDir(output)
dependencyInsightContains(output, 'org.springframework:spring-aop', extSpringVersion)
dependencyInsightContains(output, 'org.springframework:spring-beans', extSpringVersion)
dependencyInsightContains(output, 'org.springframework:spring-expression', extSpringVersion)
dependencyInsightContains(output, 'org.springframework:spring-core', extSpringVersion)
where:
extSpringVersion = '5.1.8.RELEASE'
extSpringBootVersion = '2.1.4.RELEASE'
managedSpringVersion = '5.1.6.RELEASE' // from https://repo1.maven.org/maven2/org/springframework/boot/spring-boot-dependencies/2.1.4.RELEASE/spring-boot-dependencies-2.1.4.RELEASE.pom
requestedVersion = ':\${springVersion}'
forcedVersion = ''
}
@Unroll
def 'direct dep | with requested version and forced'() {
given:
// in Spring Boot 2.x plugin, the `io.spring.dependency-management` plugin is added for dependency management. We are not including it here.
setupForDirectDependencyScenario(extSpringBootVersion, forcedVersion, '',
"\n\tspringVersion = \"$extSpringVersion\"")
buildFile << """
dependencies {
implementation "org.springframework:spring-core$requestedVersion"
implementation "org.springframework.boot:spring-boot-starter:$extSpringBootVersion"
implementation "org.springframework.boot:spring-boot-starter-web:$extSpringBootVersion"
}
""".stripIndent()
when:
def result = runTasks(*tasks())
def output = result.output
then:
writeOutputToProjectDir(output)
dependencyInsightContains(output, 'org.springframework:spring-aop', forcedVersion)
dependencyInsightContains(output, 'org.springframework:spring-beans', forcedVersion)
dependencyInsightContains(output, 'org.springframework:spring-expression', forcedVersion)
dependencyInsightContains(output, 'org.springframework:spring-core', forcedVersion)
where:
extSpringVersion = '4.2.4.RELEASE'
extSpringBootVersion = '2.1.4.RELEASE'
requestedVersion = ':\${springVersion}'
forcedVersion = '4.2.4.RELEASE'
}
@Unroll
def 'transitive dep | with requested version'() {
given:
// in Spring Boot 2.x plugin, the `io.spring.dependency-management` plugin is added for dependency management. We are not including it here.
setupForTransitiveDependencyScenario(extSpringBootVersion, forcedVersion, '',
"\n\tslf4jVersion = \"$extSlf4jVersion\"")
buildFile << """
dependencies {
implementation "org.slf4j:slf4j-simple$requestedVersion"
}
""".stripIndent()
when:
def result = runTasks(*tasks('org.slf4j'))
def output = result.output
then:
writeOutputToProjectDir(output)
dependencyInsightContains(output, 'org.slf4j:slf4j-simple', extSlf4jVersion)
dependencyInsightContains(output, 'org.slf4j:slf4j-api', extSlf4jVersion)
where:
extSpringVersion = '4.2.4.RELEASE'
extSpringBootVersion = '2.1.4.RELEASE'
extSlf4jVersion = '1.6.0'
requestedVersion = ':\$slf4jVersion'
forcedVersion = ''
}
@Unroll
def 'transitive dep | without requested version and forced'() {
given:
// in Spring Boot 2.x plugin, the `io.spring.dependency-management` plugin is added for dependency management. We are not including it here.
setupForTransitiveDependencyScenario(extSpringBootVersion, forcedVersion, '',
"\n\tslf4jVersion = \"$extSlf4jVersion\"")
buildFile << """
dependencies {
implementation "org.slf4j:slf4j-simple$requestedVersion"
}
""".stripIndent()
when:
def result = runTasks(*tasks('org.slf4j'))
def output = result.output
then:
writeOutputToProjectDir(output)
dependencyInsightContains(output, 'org.slf4j:slf4j-simple', forcedVersion)
dependencyInsightContains(output, 'org.slf4j:slf4j-api', forcedVersion)
where:
extSpringVersion = '4.2.4.RELEASE'
extSpringBootVersion = '2.1.4.RELEASE'
extSlf4jVersion = '1.6.0'
requestedVersion = ''
forcedVersion = '1.7.10'
}
@Unroll
def 'transitive dep | with lower requested version and forced to different version'() {
given:
// in Spring Boot 2.x plugin, the `io.spring.dependency-management` plugin is added for dependency management. We are not including it here.
setupForTransitiveDependencyScenario(extSpringBootVersion, forcedVersion, '',
"\n\tslf4jVersion = \"$extSlf4jVersion\"")
buildFile << """
dependencies {
implementation "org.slf4j:slf4j-simple$requestedVersion"
}
""".stripIndent()
when:
def result = runTasks(*tasks('org.slf4j'))
def output = result.output
then:
writeOutputToProjectDir(output)
dependencyInsightContains(output, 'org.slf4j:slf4j-simple', forcedVersion)
dependencyInsightContains(output, 'org.slf4j:slf4j-api', forcedVersion)
where:
extSpringVersion = '4.2.4.RELEASE'
extSpringBootVersion = '2.1.4.RELEASE'
extSlf4jVersion = '1.6.0'
requestedVersion = ':\$slf4jVersion'
forcedVersion = '1.7.10'
}
@Unroll
def 'transitive dep | with higher requested version and forced to different version'() {
given:
// in Spring Boot 2.x plugin, the `io.spring.dependency-management` plugin is added for dependency management. We are not including it here.
setupForTransitiveDependencyScenario(extSpringBootVersion, forcedVersion, '',
"\n\tslf4jVersion = \"$extSlf4jVersion\"")
buildFile << """
dependencies {
implementation "org.slf4j:slf4j-simple$requestedVersion"
}
""".stripIndent()
when:
def result = runTasks(*tasks('org.slf4j'))
def output = result.output
then:
writeOutputToProjectDir(output)
dependencyInsightContains(output, 'org.slf4j:slf4j-simple', forcedVersion)
dependencyInsightContains(output, 'org.slf4j:slf4j-api', forcedVersion)
where:
extSpringVersion = '4.2.4.RELEASE'
extSpringBootVersion = '2.1.4.RELEASE'
extSlf4jVersion = '1.8.0-beta4'
requestedVersion = ':\$slf4jVersion'
forcedVersion = '1.7.10'
}
@Unroll
def 'transitive dep | with requested version and forced to same version'() {
given:
// in Spring Boot 2.x plugin, the `io.spring.dependency-management` plugin is added for dependency management. We are not including it here.
setupForTransitiveDependencyScenario(extSpringBootVersion, forcedVersion, '',
"\n\tslf4jVersion = \"$extSlf4jVersion\"")
buildFile << """
dependencies {
implementation "org.slf4j:slf4j-simple$requestedVersion"
}
""".stripIndent()
when:
def result = runTasks(*tasks('org.slf4j'))
def output = result.output
then:
writeOutputToProjectDir(output)
dependencyInsightContains(output, 'org.slf4j:slf4j-simple', forcedVersion)
dependencyInsightContains(output, 'org.slf4j:slf4j-api', forcedVersion)
where:
extSpringVersion = '4.2.4.RELEASE'
extSpringBootVersion = '2.1.4.RELEASE'
extSlf4jVersion = '1.6.0'
requestedVersion = ':\$slf4jVersion'
forcedVersion = extSlf4jVersion
}
}
<|start_filename|>src/integTest/groovy/nebula/plugin/resolutionrules/AlignRulesTransitiveDependenciesSpec.groovy<|end_filename|>
package nebula.plugin.resolutionrules
import nebula.test.dependencies.DependencyGraphBuilder
import nebula.test.dependencies.GradleDependencyGenerator
import nebula.test.dependencies.ModuleBuilder
import spock.lang.Issue
import spock.lang.Unroll
class AlignRulesTransitiveDependenciesSpec extends AbstractAlignRulesSpec {
@Unroll
def 'can align transitive dependencies'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:1.1.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:1.1.0')
.addModule(new ModuleBuilder('test.other:c:1.0.0').addDependency('test.nebula:b:1.1.0').build())
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.other:c:1.0.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '+--- test.nebula:a:1.0.0 -> 1.1.0\n'
result.output.contains '\\--- test.other:c:1.0.0\n'
result.output.contains ' \\--- test.nebula:b:1.1.0\n'
}
@Unroll
def 'can align deeper transitive dependencies'() {
def graph = new DependencyGraphBuilder()
.addModule('test.nebula:a:1.0.0')
.addModule('test.nebula:a:1.1.0')
.addModule('test.nebula:b:1.0.0')
.addModule('test.nebula:b:1.1.0')
.addModule(new ModuleBuilder('test.other:c:1.0.0').addDependency('test.nebula:b:1.1.0').build())
.addModule(new ModuleBuilder('test.other:d:1.0.0').addDependency('test.other:c:1.0.0').build())
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.other:d:1.0.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '+--- test.nebula:a:1.0.0 -> 1.1.0\n'
result.output.contains '\\--- test.other:d:1.0.0\n'
result.output.contains ' \\--- test.other:c:1.0.0\n'
result.output.contains ' \\--- test.nebula:b:1.1.0\n'
}
@Unroll
def 'dependencies with cycles do not lead to infinite loops'() {
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.nebula:a:1.0.0').addDependency('test.other:b:1.0.0').build())
.addModule(new ModuleBuilder('test.other:b:1.0.0').addDependency('test.nebula:b:1.0.0').build())
.addModule(new ModuleBuilder('test.nebula:a:1.1.0').addDependency('test.other:b:1.0.0').build())
.addModule('test.nebula:b:1.0.0')
.addModule(new ModuleBuilder('test.nebula:b:1.1.0').addDependency('test.other:b:1.0.0').build())
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:1.1.0'
implementation 'test.nebula:b:1.0.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '+--- test.nebula:a:1.1.0\n'
result.output.contains '| \\--- test.other:b:1.0.0\n'
result.output.contains '| \\--- test.nebula:b:1.0.0 -> 1.1.0\n'
result.output.contains '| \\--- test.other:b:1.0.0 (*)\n'
result.output.contains '\\--- test.nebula:b:1.0.0 -> 1.1.0 (*)\n'
}
@Unroll
def 'able to omit dependency versions to take what is given transitively'() {
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.nebula:a:1.0.0').addDependency('test.nebula:b:1.0.0').build())
.addModule('test.nebula:b:1.0.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '+--- test.nebula:a:1.0.0\n'
result.output.contains '| \\--- test.nebula:b:1.0.0\n'
result.output.contains '\\--- test.nebula:b -> 1.0.0\n'
}
@Unroll
@Issue('#48')
def 'transitive dependencies with alignment are aligned, when parent dependency is also aligned'() {
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.nebula.a:a1:1.0.0').addDependency('test.nebula.b:b1:1.0.0').build())
.addModule(new ModuleBuilder('test.nebula.a:a1:2.0.0').addDependency('test.nebula.b:b1:2.0.0').build())
.addModule('test.nebula.a:a2:1.0.0')
.addModule('test.nebula.a:a2:2.0.0')
.addModule('test.nebula.a:a3:1.0.0')
.addModule('test.nebula.a:a3:2.0.0')
.addModule('test.nebula.b:b1:1.0.0')
.addModule('test.nebula.b:b1:2.0.0')
.addModule(new ModuleBuilder('test.nebula.b:b2:1.0.0').addDependency('test.nebula.a:a3:1.0.0').build())
.addModule(new ModuleBuilder('test.nebula.b:b2:2.0.0').addDependency('test.nebula.a:a3:1.0.0').build())
.addModule('test.nebula.c:c1:1.0.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebulaA",
"group": "test.nebula.a",
"reason": "Align test.nebula.a dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
},
{
"name": "testNebulaB",
"group": "test.nebula.b",
"reason": "Align test.nebula.b dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
// Make at least one of the dependencies a non-expected dependency
configurations.compileClasspath {
resolutionStrategy {
force 'test.nebula.c:c1:1.0.0'
}
}
dependencies {
implementation 'test.nebula.a:a1:1.+'
implementation 'test.nebula.a:a2:latest.release'
implementation 'test.nebula.b:b2:1.0.0'
implementation 'test.nebula.c:c1:1.0.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '+--- test.nebula.a:a1:1.+ -> 2.0.0\n'
result.output.contains '| \\--- test.nebula.b:b1:2.0.0\n'
result.output.contains '+--- test.nebula.a:a2:latest.release -> 2.0.0\n'
result.output.contains '+--- test.nebula.b:b2:1.0.0 -> 2.0.0\n'
result.output.contains '\\--- test.nebula.a:a3:1.0.0 -> 2.0.0\n'
result.output.contains '\\--- test.nebula.c:c1:1.0.0'
}
@Unroll
def 'can align a transitive dependency with multiple versions contributed transitively'() {
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.nebula:a:1.0.0').addDependency('test.nebula:d:2.0.0').build())
.addModule(new ModuleBuilder('test.nebula:b:1.0.0').addDependency('test.nebula:d:3.0.0').build())
.addModule(new ModuleBuilder('test.nebula:c:1.0.0').addDependency('test.nebula:d:1.0.0').build())
.addModule('test.nebula:d:1.0.0')
.addModule('test.nebula:d:2.0.0')
.addModule('test.nebula:d:3.0.0')
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [], "substitute": [], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b:1.0.0'
implementation 'test.nebula:c:1.0.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '\\--- test.nebula:d:3.0.0\n'
}
@Unroll
def 'can align a transitive dependency with a direct dependency and use substitution to downgrade'() {
given:
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder('test.nebula:a:1.0.0').addDependency('test.nebula:b:1.0.0').build())
.addModule(new ModuleBuilder('test.nebula:a:1.1.0').addDependency('test.nebula:b:1.1.0').build())
.addModule(new ModuleBuilder('test.nebula:a:1.2.0').addDependency('test.nebula:b:1.2.0').build())
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [],
"substitute": [
{
"module": "test.nebula:a:[1.2.0,)",
"with": "test.nebula:a:1.1.0",
"reason": "Downgrade",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
},
{
"module": "test.nebula:b:[1.2.0,)",
"with": "test.nebula:b:1.1.0",
"reason": "Downgrade",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
], "replace": [],
"align": [
{
"name": "testNebula",
"group": "test.nebula",
"reason": "Align test.nebula dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.nebula:a:1.0.0'
implementation 'test.nebula:b:1.2.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '+--- test.nebula:a:1.0.0 -> 1.1.0'
result.output.contains '\\--- test.nebula:b:1.2.0 -> 1.1.0'
}
@Unroll
def 'alignment of group 1 upgrades and introduces new dependencies contributing to alignment of group 2'() {
given:
debug = true
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder("test.another:newlyIntroducedParentModule:1.0.0")
.addDependency("test.group2:module1:3.0.0").build())
.addModule("test.group2:module2:3.0.0")
.addModule(new ModuleBuilder("test.another2:module1:1.0.0")
.addDependency("test.group2:module2:2.0.0").build())
.addModule(new ModuleBuilder('test.group1:module1:2.0.0').build())
.addModule(new ModuleBuilder('test.group1:module2:1.0.0')
.addDependency("test.group2:module1:1.0.0").build())
.addModule(new ModuleBuilder('test.group1:module3:1.0.0')
.addDependency("test.group2:module2:2.0.0").build())
.addModule(new ModuleBuilder('test.group1:module2:2.0.0')
.addDependency("test.group2:module1:1.0.0")
.addDependency("test.another2:module1:1.0.0").build())
.addModule(new ModuleBuilder('test.group1:module3:2.0.0')
.addDependency("test.another:newlyIntroducedParentModule:1.0.0").build())
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [],
"substitute": [], "replace": [],
"align": [
{
"name": "testGroup1",
"group": "test.group1",
"reason": "Align test.group1 dependencies",
"author": "<NAME> <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
},
{
"name": "testGroup2",
"group": "test.group2",
"reason": "Align test.group2 dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.group1:module1:2.0.0'
implementation 'test.group1:module2:1.0.0'
implementation 'test.group1:module3:1.0.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '| +--- test.group2:module1:1.0.0 -> 3.0.0'
result.output.contains '| \\--- test.group2:module2:2.0.0 -> 3.0.0'
result.output.contains ' \\--- test.group2:module1:3.0.0'
}
/* This test is currently failing for Nebula alignment due to an unfixed bug in the alignment rule implementation. We decided not to
 * invest in a fix because the problem is a relative edge case; we would rather focus on migrating to the Gradle core alignment
 * implementation. The test is kept here so we can try this case on top of the new implementation.
 */
@Unroll
def 'alignment of group 1 upgrades and introduces new dependencies contributing to alignment of group 2 and substitution still takes effect'() {
given:
debug = true
def graph = new DependencyGraphBuilder()
.addModule(new ModuleBuilder("test.another:newlyIntroducedParentModule:1.0.0")
.addDependency("test.group2:module1:3.0.0").build())
.addModule("test.group2:module1:1.1.0")
.addModule("test.group2:module1:2.0.0")
.addModule(new ModuleBuilder("test.another2:module1:1.0.0")
.addDependency("test.group2:module2:2.0.0").build())
.addModule(new ModuleBuilder('test.group1:module1:2.0.0').build())
.addModule(new ModuleBuilder('test.group1:module2:1.0.0')
.addDependency("test.group2:module1:1.0.0").build())
.addModule(new ModuleBuilder('test.group1:module3:1.0.0')
.addDependency("test.group2:module2:1.1.0").build())
.addModule(new ModuleBuilder('test.group1:module2:2.0.0')
.addDependency("test.group2:module1:1.0.0")
.addDependency("test.another2:module1:1.0.0").build())
.addModule(new ModuleBuilder('test.group1:module3:2.0.0')
.addDependency("test.another:newlyIntroducedParentModule:1.0.0").build())
.build()
File mavenrepo = new GradleDependencyGenerator(graph, "${projectDir}/testrepogen").generateTestMavenRepo()
rulesJsonFile << '''\
{
"deny": [], "reject": [],
"substitute": [
{
"module": "test.group2:module1:[3.0.0,)",
"with": "test.group2:module1:2.0.0",
"reason": "Downgrade",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
},
{
"module": "test.group2:module2:[3.0.0,)",
"with": "test.group2:module2:2.0.0",
"reason": "Downgrade",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
], "replace": [],
"align": [
{
"name": "testGroup1",
"group": "test.group1",
"reason": "Align test.group1 dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
},
{
"name": "testGroup2",
"group": "test.group2",
"reason": "Align test.group2 dependencies",
"author": "Example Person <<EMAIL>>",
"date": "2016-03-17T20:21:20.368Z"
}
]
}
'''.stripIndent()
buildFile << """\
repositories {
maven { url '${mavenrepo.absolutePath}' }
}
dependencies {
implementation 'test.group1:module1:2.0.0'
implementation 'test.group1:module2:1.0.0'
implementation 'test.group1:module3:1.0.0'
}
""".stripIndent()
when:
def result = runTasks('dependencies', '--configuration', 'compileClasspath')
then:
result.output.contains '| +--- test.group2:module1:1.0.0 -> 2.0.0'
result.output.contains '| \\--- test.group2:module2:2.0.0'
result.output.contains ' \\--- test.group2:module1:3.0.0 -> 2.0.0'
}
}
| nebula-plugins/gradle-resolution-rules-plugin |
<|start_filename|>src/main/c/hello-world.c<|end_filename|>
/* example based on examples/hello_ex.c in cdk lib sources */
#include "hello-time.h"
#include "hello-greet.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#ifdef __ANDROID__
int main(int argc, char** argv) {
char *who = "world";
if (argc > 1) {
who = argv[1];
}
char *msg = get_greet(who);
printf("%s\n", msg);
free(msg);
print_localtime();
return 0;
}
#else
#include <cdk_test.h>
int main (int argc, char **argv)
{
CDKSCREEN *cdkscreen;
CDKLABEL *demo;
const char *mesg[4];
CDK_PARAMS params;
CDKparseParams (argc, argv, &params, CDK_MIN_PARAMS);
cdkscreen = initCDKScreen (NULL);
/* Start CDK Colors. */
initCDKColor ();
/* Set the labels up. */
mesg[0] = "</5><#UL><#HL(30)><#UR>";
mesg[1] = "</5><#VL(10)>Hello World!<#VL(10)>";
mesg[2] = "</5><#LL><#HL(30)><#LR>";
/* Declare the labels. */
demo = newCDKLabel (cdkscreen,
CDKparamValue (&params, 'X', CENTER),
CDKparamValue (&params, 'Y', CENTER),
(CDK_CSTRING2) mesg, 3,
CDKparamValue (&params, 'N', TRUE),
CDKparamValue (&params, 'S', TRUE));
/* Is the label null? */
if (demo == 0)
{
/* Clean up the memory. */
destroyCDKScreen (cdkscreen);
/* End curses... */
endCDK ();
printf ("Cannot create the label. Is the window too small?\n");
ExitProgram (EXIT_FAILURE);
}
setCDKLabelBackgroundAttrib (demo, COLOR_PAIR (2));
/* Draw the CDK screen. */
refreshCDKScreen (cdkscreen);
waitCDKLabel (demo, ' ');
/* Clean up. */
destroyCDKLabel (demo);
destroyCDKScreen (cdkscreen);
endCDK ();
ExitProgram (EXIT_SUCCESS);
}
#endif
<|start_filename|>src/main/android/jni/goodbye_api.c<|end_filename|>
#include "org_example_hello_GoodbyeApi.h"
#include <stdlib.h>
#include <string.h>
static const char* str = "GOODBYE, JNI";
JNIEXPORT jstring JNICALL
Java_org_example_hello_GoodbyeApi_goodbye (JNIEnv * env, jclass klass)
{
int len = strlen(str);
jchar *str1;
str1 = (jchar *)(malloc(len * sizeof(jchar)));
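/* Widen each byte to a UTF-16 jchar. This assumes the source string is plain
 * ASCII; for ASCII data, (*env)->NewStringUTF(env, str) would build the same
 * jstring without the manual copy. */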
for (int i = 0; i < len; i++) {
str1[i] = (unsigned char)str[i];
}
jstring result = (*env)->NewString(env, str1, len);
free(str1);
return result;
}
<|start_filename|>src/main/c/hello-greet.h<|end_filename|>
#ifndef LIB_HELLO_GREET_H_
#define LIB_HELLO_GREET_H_
char *get_greet(char *thing);
#endif
<|start_filename|>src/main/c/hello-greet.c<|end_filename|>
#include "hello-greet.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
char * get_greet(char *who) {
int len = strlen(who) + 5 + 2; /* 5 for "Hello", plus 1 for the space and 1 for the terminating null */
char *s = malloc(len);
snprintf(s, len, "%s %s", "Hello", who);
return s;
}
<|start_filename|>src/main/android/java/org/example/hello/HelloApi.java<|end_filename|>
package org.example.hello;
/**
* JNI API for hello-greet native lib
*/
public class HelloApi {
public static native String hello();
}
| lyf00002/bazel-crosscompile |
<|start_filename|>packages/babel-plugin-jsx/jest.config.js<|end_filename|>
module.exports = {
setupFiles: ['./test/setup.ts'],
transform: {
'\\.(ts|tsx)$': 'ts-jest',
},
globals: {
'ts-jest': {
babelConfig: true,
},
},
};
| thisisandy/jsx |
<|start_filename|>src/css/icons/icon-circle-times.css<|end_filename|>
/* Circle Times icon */
.marka.marka-icon-circle-times i {
border-radius: 50%;
-webkit-transform: scale(0.8);
-moz-transform: scale(0.8);
-ms-transform: scale(0.8);
-o-transform: scale(0.8);
transform: scale(0.8);
}
.marka.marka-icon-circle-times i:last-child {
border-radius: 0%;
-webkit-transform: rotate(45deg) scale(0.5, 0.125);
-moz-transform: rotate(45deg) scale(0.5, 0.125);
-ms-transform: rotate(45deg) scale(0.5, 0.125);
-o-transform: rotate(45deg) scale(0.5, 0.125);
transform: rotate(45deg) scale(0.5, 0.125);
}
.marka.marka-icon-circle-times i:nth-last-child(2) {
border-radius: 0%;
-webkit-transform: rotate(-45deg) scale(0.5, 0.125);
-moz-transform: rotate(-45deg) scale(0.5, 0.125);
-ms-transform: rotate(-45deg) scale(0.5, 0.125);
-o-transform: rotate(-45deg) scale(0.5, 0.125);
transform: rotate(-45deg) scale(0.5, 0.125);
}
<|start_filename|>src/css/icons/icon-bars.css<|end_filename|>
/* Bars icon */
.marka.marka-icon-bars i {
-webkit-transform: scale(0.8, 0.2);
-moz-transform: scale(0.8, 0.2);
-ms-transform: scale(0.8, 0.2);
-o-transform: scale(0.8, 0.2);
transform: scale(0.8, 0.2);
}
.marka.marka-icon-bars i:nth-child(2) {
-webkit-transform: scale(0.8, 0.2) translate(0%, -140%);
-moz-transform: scale(0.8, 0.2) translate(0%, -140%);
-ms-transform: scale(0.8, 0.2) translate(0%, -140%);
-o-transform: scale(0.8, 0.2) translate(0%, -140%);
transform: scale(0.8, 0.2) translate(0%, -140%);
}
.marka.marka-icon-bars i:nth-child(3) {
-webkit-transform: scale(0.8, 0.2) translate(0%, 140%);
-moz-transform: scale(0.8, 0.2) translate(0%, 140%);
-ms-transform: scale(0.8, 0.2) translate(0%, 140%);
-o-transform: scale(0.8, 0.2) translate(0%, 140%);
transform: scale(0.8, 0.2) translate(0%, 140%);
}
<|start_filename|>src/css/icons/icon-times.css<|end_filename|>
/* Times icon */
.marka.marka-icon-times i {
-webkit-transform: rotate(45deg) scale(0.8, 0.2);
-moz-transform: rotate(45deg) scale(0.8, 0.2);
-ms-transform: rotate(45deg) scale(0.8, 0.2);
-o-transform: rotate(45deg) scale(0.8, 0.2);
transform: rotate(45deg) scale(0.8, 0.2);
}
.marka.marka-icon-times i:nth-child(2) {
-webkit-transform: rotate(-45deg) scale(0.8, 0.2);
-moz-transform: rotate(-45deg) scale(0.8, 0.2);
-ms-transform: rotate(-45deg) scale(0.8, 0.2);
-o-transform: rotate(-45deg) scale(0.8, 0.2);
transform: rotate(-45deg) scale(0.8, 0.2);
}
<|start_filename|>src/css/icons/icon-arrow.css<|end_filename|>
/* Arrow icon */
.marka.marka-icon-arrow i {
-webkit-transform: rotate(45deg) scale(0.55, 0.2) translate(-0.5%, -90%);
-moz-transform: rotate(45deg) scale(0.55, 0.2) translate(-0.5%, -90%);
-ms-transform: rotate(45deg) scale(0.55, 0.2) translate(-0.5%, -90%);
-o-transform: rotate(45deg) scale(0.55, 0.2) translate(-0.5%, -90%);
transform: rotate(45deg) scale(0.55, 0.2) translate(-0.5%, -90%);
}
.marka.marka-icon-arrow i:nth-child(2) {
-webkit-transform: rotate(135deg) scale(0.55, 0.2) translate(-0.5%, 90%);
-moz-transform: rotate(135deg) scale(0.55, 0.2) translate(-0.5%, 90%);
-ms-transform: rotate(135deg) scale(0.55, 0.2) translate(-0.5%, 90%);
-o-transform: rotate(135deg) scale(0.55, 0.2) translate(-0.5%, 90%);
transform: rotate(135deg) scale(0.55, 0.2) translate(-0.5%, 90%);
}
.marka.marka-icon-arrow i:nth-child(3) {
-webkit-transform: rotate(90deg) scale(0.6, 0.2) translate(17%, 0%);
-moz-transform: rotate(90deg) scale(0.6, 0.2) translate(17%, 0%);
-ms-transform: rotate(90deg) scale(0.6, 0.2) translate(17%, 0%);
-o-transform: rotate(90deg) scale(0.6, 0.2) translate(17%, 0%);
transform: rotate(90deg) scale(0.6, 0.2) translate(17%, 0%);
}
<|start_filename|>src/css/icons/icon-chevron.css<|end_filename|>
/* Chevron icon */
.marka.marka-icon-chevron i {
-webkit-transform: rotate(45deg) scale(0.6, 0.2) translate(16.5%, -50%);
-moz-transform: rotate(45deg) scale(0.6, 0.2) translate(16.5%, -50%);
-ms-transform: rotate(45deg) scale(0.6, 0.2) translate(16.5%, -50%);
-o-transform: rotate(45deg) scale(0.6, 0.2) translate(16.5%, -50%);
transform: rotate(45deg) scale(0.6, 0.2) translate(16.5%, -50%);
}
.marka.marka-icon-chevron i:nth-child(2) {
-webkit-transform: rotate(135deg) scale(0.6, 0.2) translate(16.5%, 50%);
-moz-transform: rotate(135deg) scale(0.6, 0.2) translate(16.5%, 50%);
-ms-transform: rotate(135deg) scale(0.6, 0.2) translate(16.5%, 50%);
-o-transform: rotate(135deg) scale(0.6, 0.2) translate(16.5%, 50%);
transform: rotate(135deg) scale(0.6, 0.2) translate(16.5%, 50%);
}
<|start_filename|>src/css/icons/icon-circle.css<|end_filename|>
/* Circle icon */
.marka.marka-icon-circle i {
border-radius: 50%;
-webkit-transform: scale(0.8);
-moz-transform: scale(0.8);
-ms-transform: scale(0.8);
-o-transform: scale(0.8);
transform: scale(0.8);
}
<|start_filename|>src/css/icons/icon-sort.css<|end_filename|>
/* Sort icon */
.marka.marka-icon-sort i {
border-radius: 0% 30% 0 30%;
-webkit-transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-moz-transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-ms-transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-o-transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
}
.marka.marka-icon-sort i:nth-child(2) {
-webkit-transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-moz-transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-ms-transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-o-transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
}
.marka.marka-icon-sort i:nth-child(3) {
-webkit-transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-moz-transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-ms-transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-o-transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
}
.marka.marka-icon-sort i:nth-child(4) {
-webkit-transform: rotate(120deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-moz-transform: rotate(120deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-ms-transform: rotate(120deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-o-transform: rotate(120deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
transform: rotate(120deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
}
.marka.marka-icon-sort i:nth-child(5) {
-webkit-transform: rotate(180deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-moz-transform: rotate(180deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-ms-transform: rotate(180deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-o-transform: rotate(180deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
transform: rotate(180deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
}
.marka.marka-icon-sort i:nth-child(6) {
-webkit-transform: rotate(270deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-moz-transform: rotate(270deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-ms-transform: rotate(270deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-o-transform: rotate(270deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
transform: rotate(270deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
}
<|start_filename|>src/css/icons/icon-signal-five.css<|end_filename|>
/* Signal icon (5 parts) */
.marka.marka-icon-signal-five-one i,
.marka.marka-icon-signal-five-two i,
.marka.marka-icon-signal-five-three i,
.marka.marka-icon-signal-five-four i,
.marka.marka-icon-signal-five i {
-webkit-transform: rotate(90deg) scale(0.16, 0.12) translate(200%, 280%);
-moz-transform: rotate(90deg) scale(0.16, 0.12) translate(200%, 280%);
-ms-transform: rotate(90deg) scale(0.16, 0.12) translate(200%, 280%);
-o-transform: rotate(90deg) scale(0.16, 0.12) translate(200%, 280%);
transform: rotate(90deg) scale(0.16, 0.12) translate(200%, 280%);
}
.marka.marka-icon-signal-five-two i:nth-child(2),
.marka.marka-icon-signal-five-two i:nth-child(3),
.marka.marka-icon-signal-five-two i:nth-child(4),
.marka.marka-icon-signal-five-two i:nth-child(5),
.marka.marka-icon-signal-five-three i:nth-child(2),
.marka.marka-icon-signal-five-four i:nth-child(2),
.marka.marka-icon-signal-five i:nth-child(2) {
-webkit-transform: rotate(90deg) scale(0.32, 0.12) translate(75%, 140%);
-moz-transform: rotate(90deg) scale(0.32, 0.12) translate(75%, 140%);
-ms-transform: rotate(90deg) scale(0.32, 0.12) translate(75%, 140%);
-o-transform: rotate(90deg) scale(0.32, 0.12) translate(75%, 140%);
transform: rotate(90deg) scale(0.32, 0.12) translate(75%, 140%);
}
.marka.marka-icon-signal-five-three i:nth-child(3),
.marka.marka-icon-signal-five-three i:nth-child(4),
.marka.marka-icon-signal-five-three i:nth-child(5),
.marka.marka-icon-signal-five-four i:nth-child(3),
.marka.marka-icon-signal-five i:nth-child(3) {
-webkit-transform: rotate(90deg) scale(0.48, 0.12) translate(34%, 0%);
-moz-transform: rotate(90deg) scale(0.48, 0.12) translate(34%, 0%);
-ms-transform: rotate(90deg) scale(0.48, 0.12) translate(34%, 0%);
-o-transform: rotate(90deg) scale(0.48, 0.12) translate(34%, 0%);
transform: rotate(90deg) scale(0.48, 0.12) translate(34%, 0%);
}
.marka.marka-icon-signal-five-four i:nth-child(4),
.marka.marka-icon-signal-five-four i:nth-child(5),
.marka.marka-icon-signal-five i:nth-child(4) {
-webkit-transform: rotate(90deg) scale(0.64, 0.12) translate(13%, -140%);
-moz-transform: rotate(90deg) scale(0.64, 0.12) translate(13%, -140%);
-ms-transform: rotate(90deg) scale(0.64, 0.12) translate(13%, -140%);
-o-transform: rotate(90deg) scale(0.64, 0.12) translate(13%, -140%);
transform: rotate(90deg) scale(0.64, 0.12) translate(13%, -140%);
}
.marka.marka-icon-signal-five i:nth-child(5) {
-webkit-transform: rotate(90deg) scale(0.8, 0.12) translate(0%, -280%);
-moz-transform: rotate(90deg) scale(0.8, 0.12) translate(0%, -280%);
-ms-transform: rotate(90deg) scale(0.8, 0.12) translate(0%, -280%);
-o-transform: rotate(90deg) scale(0.8, 0.12) translate(0%, -280%);
transform: rotate(90deg) scale(0.8, 0.12) translate(0%, -280%);
}
<|start_filename|>src/css/icons/icon-triangle.css<|end_filename|>
/* Triangle icon */
.marka.marka-icon-triangle i {
border-radius: 0% 30% 0 30%;
-webkit-transform: rotate(-60deg) skewX(-30deg) scale(0.5, 0.433) translate(2%, -5%);
-moz-transform: rotate(-60deg) skewX(-30deg) scale(0.5, 0.433) translate(2%, -5%);
-ms-transform: rotate(-60deg) skewX(-30deg) scale(0.5, 0.433) translate(2%, -5%);
-o-transform: rotate(-60deg) skewX(-30deg) scale(0.5, 0.433) translate(2%, -5%);
transform: rotate(-60deg) skewX(-30deg) scale(0.5, 0.433) translate(2%, -5%);
}
.marka.marka-icon-triangle i:nth-child(2) {
-webkit-transform: rotate(180deg) skewX(-30deg) scale(0.5) translate(5%, -30%);
-moz-transform: rotate(180deg) skewX(-30deg) scale(0.5) translate(5%, -30%);
-ms-transform: rotate(180deg) skewX(-30deg) scale(0.5) translate(5%, -30%);
-o-transform: rotate(180deg) skewX(-30deg) scale(0.5) translate(5%, -30%);
transform: rotate(180deg) skewX(-30deg) scale(0.5) translate(5%, -30%);
}
.marka.marka-icon-triangle i:nth-child(3) {
-webkit-transform: rotate(90deg) skewY(-30deg) scale(0.5) translate(30%, -2%);
-moz-transform: rotate(90deg) skewY(-30deg) scale(0.5) translate(30%, -2%);
-ms-transform: rotate(90deg) skewY(-30deg) scale(0.5) translate(30%, -2%);
-o-transform: rotate(90deg) skewY(-30deg) scale(0.5) translate(30%, -2%);
transform: rotate(90deg) skewY(-30deg) scale(0.5) translate(30%, -2%);
}
<|start_filename|>src/css/icons/icon-asterisk.css<|end_filename|>
/* Asterisk icon */
.marka.marka-icon-asterisk i {
-webkit-transform: rotate(90deg) scale(0.8, 0.2);
-moz-transform: rotate(90deg) scale(0.8, 0.2);
-ms-transform: rotate(90deg) scale(0.8, 0.2);
-o-transform: rotate(90deg) scale(0.8, 0.2);
transform: rotate(90deg) scale(0.8, 0.2);
}
.marka.marka-icon-asterisk i:nth-child(2) {
-webkit-transform: rotate(-30deg) scale(0.8, 0.2);
-moz-transform: rotate(-30deg) scale(0.8, 0.2);
-ms-transform: rotate(-30deg) scale(0.8, 0.2);
-o-transform: rotate(-30deg) scale(0.8, 0.2);
transform: rotate(-30deg) scale(0.8, 0.2);
}
.marka.marka-icon-asterisk i:nth-child(3) {
-webkit-transform: rotate(30deg) scale(0.8, 0.2);
-moz-transform: rotate(30deg) scale(0.8, 0.2);
-ms-transform: rotate(30deg) scale(0.8, 0.2);
-o-transform: rotate(30deg) scale(0.8, 0.2);
transform: rotate(30deg) scale(0.8, 0.2);
}
<|start_filename|>_site/Gruntfile.js<|end_filename|>
module.exports = function(grunt) {
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
// Clean all compiled files
clean: [
// Clean compiled css in source
'src/css/**/*.css',
// Clean docs static css
'docs/static/marka/css/**/*.css'
],
// Compile all LESS files individually
less: {
compile: {
options: {
},
files: [{
expand: true,
cwd: 'src/less',
src: ['**/*.less', '!{boot,var,mix}*.less'],
dest: 'src/css/',
ext: '.css'
}]
}
},
// Add license
concat: {
options: {
banner: '\n/*! \n' +
' * Marka - v<%= pkg.version %> \n' +
' * http://fian.my.id/marka \n' +
' * \n' +
' * Copyright 2014 <NAME> and other contributors \n' +
' * Released under the MIT license \n' +
' * https://github.com/fians/marka/blob/master/LICENSE \n' +
' */ \n',
},
css: {
src: ['src/css/marka-core.css', 'src/css/icons/*.css'],
dest: 'dist/css/marka.css'
},
js: {
src: ['src/js/marka.js'],
dest: 'dist/js/marka.js'
}
},
cssmin: {
minify: {
files: {
'dist/css/marka.min.css': ['dist/css/marka.css']
}
}
},
jshint: {
files: [
'Gruntfile.js',
'src/**/*.js',
],
options: {
globals: {
console: true
}
}
},
uglify: {
options: {
mangle: true,
sourceMap: true,
sourceMapName: 'dist/js/marka.min.js.map',
preserveComments: 'some'
},
my_target: {
files: {
'dist/js/marka.min.js': ['dist/js/marka.js']
}
}
},
// Copy compiled file to docs
copy: {
distJSToDocs: {
expand : true,
cwd: 'dist/js/',
src: 'marka.js',
dest: 'docs/static/marka/js'
},
distCSStoDocs: {
expand : true,
cwd: 'dist/css/',
src: '**',
dest: 'docs/static/marka/css/'
},
srcCSStoDocs: {
expand: true,
cwd: 'src/css/',
src: '**/*.css',
dest: 'docs/static/marka/css/src'
}
},
watch: {
script: {
options: {
spawn: false,
event: ['added', 'deleted', 'changed']
},
files: ['src/**/*.js', 'src/**/*.less'],
tasks: ['less', 'concat', 'cssmin', 'jshint', 'uglify', 'copy']
},
grunt: {
files: ['Gruntfile.js']
}
}
});
// Load module
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-less');
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.loadNpmTasks('grunt-contrib-cssmin');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-contrib-watch');
// Create grunt task
grunt.registerTask('build', ['less', 'concat', 'cssmin', 'jshint', 'uglify', 'copy']);
};
<|start_filename|>src/css/icons/icon-minus.css<|end_filename|>
/* Minus icon */
.marka.marka-icon-minus i {
-webkit-transform: scale(0.8, 0.2);
-moz-transform: scale(0.8, 0.2);
-ms-transform: scale(0.8, 0.2);
-o-transform: scale(0.8, 0.2);
transform: scale(0.8, 0.2);
}
<|start_filename|>src/css/icons/icon-square.css<|end_filename|>
/* Square icon */
.marka.marka-icon-square i {
border-radius: 10%;
-webkit-transform: scale(0.8);
-moz-transform: scale(0.8);
-ms-transform: scale(0.8);
-o-transform: scale(0.8);
transform: scale(0.8);
}
<|start_filename|>src/css/icons/icon-square-o-minus.css<|end_filename|>
/* Square-O Minus icon */
.marka.marka-icon-square-o-minus i {
border-radius: 0%;
-webkit-transform: rotate(0deg) scale(0.5, 0.125);
-moz-transform: rotate(0deg) scale(0.5, 0.125);
-ms-transform: rotate(0deg) scale(0.5, 0.125);
-o-transform: rotate(0deg) scale(0.5, 0.125);
transform: rotate(0deg) scale(0.5, 0.125);
}
.marka.marka-icon-square-o-minus i:nth-child(1) {
border-radius: 10%;
-webkit-transform: scale(0.8);
-moz-transform: scale(0.8);
-ms-transform: scale(0.8);
-o-transform: scale(0.8);
transform: scale(0.8);
}
.marka.marka-icon-square-o-minus i:nth-child(2) {
border-radius: 10%;
-webkit-transform: scale(0.65);
-moz-transform: scale(0.65);
-ms-transform: scale(0.65);
-o-transform: scale(0.65);
transform: scale(0.65);
}
<|start_filename|>src/css/icons/icon-circle-o.css<|end_filename|>
/* Circle-O icon */
.marka.marka-icon-circle-o i {
border-radius: 50%;
-webkit-transform: rotate(0deg) scale(0);
-moz-transform: rotate(0deg) scale(0);
-ms-transform: rotate(0deg) scale(0);
-o-transform: rotate(0deg) scale(0);
transform: rotate(0deg) scale(0);
}
.marka.marka-icon-circle-o i:nth-child(1) {
-webkit-transform: rotate(0deg) scale(0.8);
-moz-transform: rotate(0deg) scale(0.8);
-ms-transform: rotate(0deg) scale(0.8);
-o-transform: rotate(0deg) scale(0.8);
transform: rotate(0deg) scale(0.8);
}
.marka.marka-icon-circle-o i:nth-child(2) {
-webkit-transform: rotate(0deg) scale(0.65);
-moz-transform: rotate(0deg) scale(0.65);
-ms-transform: rotate(0deg) scale(0.65);
-o-transform: rotate(0deg) scale(0.65);
transform: rotate(0deg) scale(0.65);
}
<|start_filename|>src/css/icons/icon-sort-half.css<|end_filename|>
/* Sort half icon */
.marka.marka-icon-sort-half i {
border-radius: 0% 30% 0 30%;
-webkit-transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-moz-transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-ms-transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
-o-transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
transform: rotate(-60deg) skewX(-30deg) scale(0.25, 0.216) translate(60%, -60%);
}
.marka.marka-icon-sort-half i:nth-child(2n) {
-webkit-transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-moz-transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-ms-transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
-o-transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
transform: rotate(0deg) skewX(-30deg) scale(0.25) translate(-60%, -70%);
}
.marka.marka-icon-sort-half i:nth-child(3n) {
-webkit-transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-moz-transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-ms-transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
-o-transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
transform: rotate(90deg) skewY(-30deg) scale(0.25) translate(-70%, -60%);
}
<|start_filename|>src/css/icons/icon-square-o-times.css<|end_filename|>
/* Square-O Times icon */
.marka.marka-icon-square-o-times i {
border-radius: 0%;
-webkit-transform: rotate(45deg) scale(0.5, 0.125);
-moz-transform: rotate(45deg) scale(0.5, 0.125);
-ms-transform: rotate(45deg) scale(0.5, 0.125);
-o-transform: rotate(45deg) scale(0.5, 0.125);
transform: rotate(45deg) scale(0.5, 0.125);
}
.marka.marka-icon-square-o-times i:nth-child(1) {
border-radius: 10%;
-webkit-transform: scale(0.8);
-moz-transform: scale(0.8);
-ms-transform: scale(0.8);
-o-transform: scale(0.8);
transform: scale(0.8);
}
.marka.marka-icon-square-o-times i:nth-child(2) {
border-radius: 10%;
-webkit-transform: scale(0.65);
-moz-transform: scale(0.65);
-ms-transform: scale(0.65);
-o-transform: scale(0.65);
transform: scale(0.65);
}
.marka.marka-icon-square-o-times i:nth-child(3) {
border-radius: 0%;
-webkit-transform: rotate(-45deg) scale(0.5, 0.125);
-moz-transform: rotate(-45deg) scale(0.5, 0.125);
-ms-transform: rotate(-45deg) scale(0.5, 0.125);
-o-transform: rotate(-45deg) scale(0.5, 0.125);
transform: rotate(-45deg) scale(0.5, 0.125);
}
<|start_filename|>src/css/icons/icon-angle.css<|end_filename|>
/* Angle icon */
.marka.marka-icon-angle i {
-webkit-transform: rotate(45deg) scale(0.4, 0.1) translate(20%, -75%);
-moz-transform: rotate(45deg) scale(0.4, 0.1) translate(20%, -75%);
-ms-transform: rotate(45deg) scale(0.4, 0.1) translate(20%, -75%);
-o-transform: rotate(45deg) scale(0.4, 0.1) translate(20%, -75%);
transform: rotate(45deg) scale(0.4, 0.1) translate(20%, -75%);
}
.marka.marka-icon-angle i:nth-child(2n) {
-webkit-transform: rotate(135deg) scale(0.4, 0.1) translate(20%, 75%);
-moz-transform: rotate(135deg) scale(0.4, 0.1) translate(20%, 75%);
-ms-transform: rotate(135deg) scale(0.4, 0.1) translate(20%, 75%);
-o-transform: rotate(135deg) scale(0.4, 0.1) translate(20%, 75%);
transform: rotate(135deg) scale(0.4, 0.1) translate(20%, 75%);
}
<|start_filename|>src/css/marka-core.css<|end_filename|>
/* Core style */
.marka {
position: relative;
display: inline-block;
vertical-align: bottom;
-webkit-transition: all 500ms;
-moz-transition: all 500ms;
-o-transition: all 500ms;
transition: all 500ms;
}
.marka i {
position: absolute;
display: block;
width: 100%;
height: 100%;
opacity: 0;
background: #000000;
-webkit-transition: all 500ms;
-moz-transition: all 500ms;
-o-transition: all 500ms;
transition: all 500ms;
}
.marka.marka-set i {
opacity: 1;
}
.marka.marka-rotate-right {
-webkit-transform: rotate(90deg);
-moz-transform: rotate(90deg);
-ms-transform: rotate(90deg);
-o-transform: rotate(90deg);
transform: rotate(90deg);
}
.marka.marka-rotate-left {
-webkit-transform: rotate(-90deg);
-moz-transform: rotate(-90deg);
-ms-transform: rotate(-90deg);
-o-transform: rotate(-90deg);
transform: rotate(-90deg);
}
.marka.marka-rotate-down {
-webkit-transform: rotate(180deg);
-moz-transform: rotate(180deg);
-ms-transform: rotate(180deg);
-o-transform: rotate(180deg);
transform: rotate(180deg);
}
.marka.marka-icon-empty {
-webkit-transform: scale(0);
-moz-transform: scale(0);
-ms-transform: scale(0);
-o-transform: scale(0);
transform: scale(0);
}
<|start_filename|>src/css/icons/icon-circle-minus.css<|end_filename|>
/* Circle Minus icon */
.marka.marka-icon-circle-minus i {
border-radius: 50%;
-webkit-transform: scale(0.8);
-moz-transform: scale(0.8);
-ms-transform: scale(0.8);
-o-transform: scale(0.8);
transform: scale(0.8);
}
.marka.marka-icon-circle-minus i:last-child,
.marka.marka-icon-circle-minus i:nth-last-child(2) {
border-radius: 0%;
-webkit-transform: rotate(0deg) scale(0.5, 0.125);
-moz-transform: rotate(0deg) scale(0.5, 0.125);
-ms-transform: rotate(0deg) scale(0.5, 0.125);
-o-transform: rotate(0deg) scale(0.5, 0.125);
transform: rotate(0deg) scale(0.5, 0.125);
}
<|start_filename|>src/css/icons/icon-square-o-filled.css<|end_filename|>
/* Square-O (filled-version) icon */
.marka.marka-icon-square-o-filled i {
border-radius: 10%;
-webkit-transform: rotate(0deg) scale(0.5);
-moz-transform: rotate(0deg) scale(0.5);
-ms-transform: rotate(0deg) scale(0.5);
-o-transform: rotate(0deg) scale(0.5);
transform: rotate(0deg) scale(0.5);
}
.marka.marka-icon-square-o-filled i:nth-child(1) {
-webkit-transform: rotate(0deg) scale(0.8);
-moz-transform: rotate(0deg) scale(0.8);
-ms-transform: rotate(0deg) scale(0.8);
-o-transform: rotate(0deg) scale(0.8);
transform: rotate(0deg) scale(0.8);
}
.marka.marka-icon-square-o-filled i:nth-child(2) {
-webkit-transform: rotate(0deg) scale(0.65);
-moz-transform: rotate(0deg) scale(0.65);
-ms-transform: rotate(0deg) scale(0.65);
-o-transform: rotate(0deg) scale(0.65);
transform: rotate(0deg) scale(0.65);
}
<|start_filename|>src/css/icons/icon-plus.css<|end_filename|>
/* Plus icon */
.marka.marka-icon-plus i {
-webkit-transform: scale(0.8, 0.2);
-moz-transform: scale(0.8, 0.2);
-ms-transform: scale(0.8, 0.2);
-o-transform: scale(0.8, 0.2);
transform: scale(0.8, 0.2);
}
.marka.marka-icon-plus i:nth-child(2) {
-webkit-transform: rotate(90deg) scale(0.8, 0.2);
-moz-transform: rotate(90deg) scale(0.8, 0.2);
-ms-transform: rotate(90deg) scale(0.8, 0.2);
-o-transform: rotate(90deg) scale(0.8, 0.2);
transform: rotate(90deg) scale(0.8, 0.2);
}
<|start_filename|>src/css/icons/icon-check.css<|end_filename|>
/* Check icon */
.marka.marka-icon-check i {
-webkit-transform: rotate(-45deg) scale(0.75, 0.2) translate(10%, 50%);
-moz-transform: rotate(-45deg) scale(0.75, 0.2) translate(10%, 50%);
-ms-transform: rotate(-45deg) scale(0.75, 0.2) translate(10%, 50%);
-o-transform: rotate(-45deg) scale(0.75, 0.2) translate(10%, 50%);
transform: rotate(-45deg) scale(0.75, 0.2) translate(10%, 50%);
}
.marka.marka-icon-check i:nth-child(2) {
-webkit-transform: rotate(45deg) scale(0.5, 0.2) translate(-10%, 120%);
-moz-transform: rotate(45deg) scale(0.5, 0.2) translate(-10%, 120%);
-ms-transform: rotate(45deg) scale(0.5, 0.2) translate(-10%, 120%);
-o-transform: rotate(45deg) scale(0.5, 0.2) translate(-10%, 120%);
transform: rotate(45deg) scale(0.5, 0.2) translate(-10%, 120%);
}
<|start_filename|>src/css/icons/icon-angle-double.css<|end_filename|>
/* Angle double icon */
.marka.marka-icon-angle-double i {
-webkit-transform: rotate(45deg) scale(0.4, 0.1) translate(0%, -150%);
-moz-transform: rotate(45deg) scale(0.4, 0.1) translate(0%, -150%);
-ms-transform: rotate(45deg) scale(0.4, 0.1) translate(0%, -150%);
-o-transform: rotate(45deg) scale(0.4, 0.1) translate(0%, -150%);
transform: rotate(45deg) scale(0.4, 0.1) translate(0%, -150%);
}
.marka.marka-icon-angle-double i:nth-child(2) {
-webkit-transform: rotate(135deg) scale(0.4, 0.1) translate(0%, 150%);
-moz-transform: rotate(135deg) scale(0.4, 0.1) translate(0%, 150%);
-ms-transform: rotate(135deg) scale(0.4, 0.1) translate(0%, 150%);
-o-transform: rotate(135deg) scale(0.4, 0.1) translate(0%, 150%);
transform: rotate(135deg) scale(0.4, 0.1) translate(0%, 150%);
}
.marka.marka-icon-angle-double i:nth-child(3) {
-webkit-transform: rotate(45deg) scale(0.4, 0.1) translate(40%, 10%);
-moz-transform: rotate(45deg) scale(0.4, 0.1) translate(40%, 10%);
-ms-transform: rotate(45deg) scale(0.4, 0.1) translate(40%, 10%);
-o-transform: rotate(45deg) scale(0.4, 0.1) translate(40%, 10%);
transform: rotate(45deg) scale(0.4, 0.1) translate(40%, 10%);
}
.marka.marka-icon-angle-double i:nth-child(4) {
-webkit-transform: rotate(135deg) scale(0.4, 0.1) translate(40%, -10%);
-moz-transform: rotate(135deg) scale(0.4, 0.1) translate(40%, -10%);
-ms-transform: rotate(135deg) scale(0.4, 0.1) translate(40%, -10%);
-o-transform: rotate(135deg) scale(0.4, 0.1) translate(40%, -10%);
transform: rotate(135deg) scale(0.4, 0.1) translate(40%, -10%);
}
<|start_filename|>src/css/icons/icon-pause.css<|end_filename|>
/* Pause icon */
.marka.marka-icon-pause i {
-webkit-transform: rotate(90deg) scale(0.8, 0.35) translate(0%, 65%);
-moz-transform: rotate(90deg) scale(0.8, 0.35) translate(0%, 65%);
-ms-transform: rotate(90deg) scale(0.8, 0.35) translate(0%, 65%);
-o-transform: rotate(90deg) scale(0.8, 0.35) translate(0%, 65%);
transform: rotate(90deg) scale(0.8, 0.35) translate(0%, 65%);
}
.marka.marka-icon-pause i:nth-child(2) {
-webkit-transform: rotate(90deg) scale(0.8, 0.35) translate(0%, -65%);
-moz-transform: rotate(90deg) scale(0.8, 0.35) translate(0%, -65%);
-ms-transform: rotate(90deg) scale(0.8, 0.35) translate(0%, -65%);
-o-transform: rotate(90deg) scale(0.8, 0.35) translate(0%, -65%);
transform: rotate(90deg) scale(0.8, 0.35) translate(0%, -65%);
}
<|start_filename|>bower.json<|end_filename|>
{
"name": "marka",
"homepage": "https://github.com/fians/marka",
"authors": [
"<NAME> <<EMAIL>>"
],
"description": "Beautiful icon transformations",
"keywords": [
"icon",
"transform",
"css3"
],
"license": "MIT",
"main": "dist/js/marka.js",
"ignore": [
"**/.*",
"node_modules",
"bower_components",
"test",
"tests"
]
}
<|start_filename|>src/css/icons/icon-square-o-check.css<|end_filename|>
/* Square-O check icon */
.marka.marka-icon-square-o-check i {
border-radius: 0%;
-webkit-transform: rotate(-45deg) scale(0.5, 0.125) translate(7%, 35%);
-moz-transform: rotate(-45deg) scale(0.5, 0.125) translate(7%, 35%);
-ms-transform: rotate(-45deg) scale(0.5, 0.125) translate(7%, 35%);
-o-transform: rotate(-45deg) scale(0.5, 0.125) translate(7%, 35%);
transform: rotate(-45deg) scale(0.5, 0.125) translate(7%, 35%);
}
.marka.marka-icon-square-o-check i:nth-child(1) {
border-radius: 10%;
-webkit-transform: scale(0.8);
-moz-transform: scale(0.8);
-ms-transform: scale(0.8);
-o-transform: scale(0.8);
transform: scale(0.8);
}
.marka.marka-icon-square-o-check i:nth-child(2) {
border-radius: 10%;
-webkit-transform: scale(0.65);
-moz-transform: scale(0.65);
-ms-transform: scale(0.65);
-o-transform: scale(0.65);
transform: scale(0.65);
}
.marka.marka-icon-square-o-check i:nth-child(3) {
border-radius: 0%;
-webkit-transform: rotate(45deg) scale(0.3, 0.125) translate(-15%, 123%);
-moz-transform: rotate(45deg) scale(0.3, 0.125) translate(-15%, 123%);
-ms-transform: rotate(45deg) scale(0.3, 0.125) translate(-15%, 123%);
-o-transform: rotate(45deg) scale(0.3, 0.125) translate(-15%, 123%);
transform: rotate(45deg) scale(0.3, 0.125) translate(-15%, 123%);
}
<|start_filename|>src/css/icons/icon-square-plus.css<|end_filename|>
/* Square Plus icon */
.marka.marka-icon-square-plus i {
border-radius: 10%;
-webkit-transform: scale(0.8);
-moz-transform: scale(0.8);
-ms-transform: scale(0.8);
-o-transform: scale(0.8);
transform: scale(0.8);
}
.marka.marka-icon-square-plus i:last-child {
border-radius: 0%;
-webkit-transform: rotate(0deg) scale(0.5, 0.125);
-moz-transform: rotate(0deg) scale(0.5, 0.125);
-ms-transform: rotate(0deg) scale(0.5, 0.125);
-o-transform: rotate(0deg) scale(0.5, 0.125);
transform: rotate(0deg) scale(0.5, 0.125);
}
.marka.marka-icon-square-plus i:nth-last-child(2) {
border-radius: 0%;
-webkit-transform: rotate(90deg) scale(0.5, 0.125);
-moz-transform: rotate(90deg) scale(0.5, 0.125);
-ms-transform: rotate(90deg) scale(0.5, 0.125);
-o-transform: rotate(90deg) scale(0.5, 0.125);
transform: rotate(90deg) scale(0.5, 0.125);
}
<|start_filename|>src/css/icons/icon-signal-three.css<|end_filename|>
/* Signal icon (3 parts) */
.marka.marka-icon-signal-three-one i,
.marka.marka-icon-signal-three-two i,
.marka.marka-icon-signal-three i {
-webkit-transform: rotate(90deg) scale(0.24, 0.2) translate(120%, 140%);
-moz-transform: rotate(90deg) scale(0.24, 0.2) translate(120%, 140%);
-ms-transform: rotate(90deg) scale(0.24, 0.2) translate(120%, 140%);
-o-transform: rotate(90deg) scale(0.24, 0.2) translate(120%, 140%);
transform: rotate(90deg) scale(0.24, 0.2) translate(120%, 140%);
}
.marka.marka-icon-signal-three-two i:nth-child(2),
.marka.marka-icon-signal-three-two i:nth-child(3),
.marka.marka-icon-signal-three i:nth-child(2) {
-webkit-transform: rotate(90deg) scale(0.48, 0.2) translate(35%, 0%);
-moz-transform: rotate(90deg) scale(0.48, 0.2) translate(35%, 0%);
-ms-transform: rotate(90deg) scale(0.48, 0.2) translate(35%, 0%);
-o-transform: rotate(90deg) scale(0.48, 0.2) translate(35%, 0%);
transform: rotate(90deg) scale(0.48, 0.2) translate(35%, 0%);
}
.marka.marka-icon-signal-three i:nth-child(3) {
-webkit-transform: rotate(90deg) scale(0.8, 0.2) translate(0%, -140%);
-moz-transform: rotate(90deg) scale(0.8, 0.2) translate(0%, -140%);
-ms-transform: rotate(90deg) scale(0.8, 0.2) translate(0%, -140%);
-o-transform: rotate(90deg) scale(0.8, 0.2) translate(0%, -140%);
transform: rotate(90deg) scale(0.8, 0.2) translate(0%, -140%);
}
| gizmowiki/gizmowiki.github.io |
<|start_filename|>test/basic.not_preserve.expect.css<|end_filename|>
test {
font: italic bold 12px/30px -apple-system, Segoe UI, Roboto, Noto Sans, Ubuntu, Cantarell, Helvetica Neue, sans-serif;
font: italic bold 12px/30px -apple-system, Segoe UI, Roboto, Noto Sans, Ubuntu, Cantarell, Helvetica Neue, sans-serif;
font: italic bold 12px/30px sans-serif;
font-family: -apple-system, Segoe UI, Roboto, Noto Sans, Ubuntu, Cantarell, Helvetica Neue, sans-serif;
font-family: -apple-system, Segoe UI, Roboto, Noto Sans, Ubuntu, Cantarell, Helvetica Neue, sans-serif;
font-family: -apple-system, Segoe UI, Roboto, Noto Sans, Ubuntu, Cantarell, Helvetica Neue;
font-family: sans-serif;
font-family: "Droid Sans", -apple-system, Segoe UI, Roboto, Noto Sans, Ubuntu, Cantarell, Helvetica Neue, sans-serif;
}
<|start_filename|>package.json<|end_filename|>
{
"name": "postcss-font-family-system-ui",
"version": "5.0.0",
"description": "Use the system-ui font family in CSS",
"author": "<NAME> <<EMAIL>>",
"contributors": [
"<NAME> <<EMAIL>>",
"<NAME> <<EMAIL>>"
],
"license": "CC0-1.0",
"repository": "JLHwung/postcss-font-family-system-ui",
"homepage": "https://github.com/JLHwung/postcss-font-family-system-ui#readme",
"bugs": "https://github.com/JLHwung/postcss-font-family-system-ui/issues",
"main": "./dist/index.bundle.cjs",
"type": "module",
"exports": {
"import": "./dist/index.bundle.mjs",
"require": "./dist/index.bundle.cjs"
},
"files": [
"dist/"
],
"scripts": {
"build": "rollup -c .rollup.mjs",
"check:js": "eslint *.[cm]js --cache --ignore-path .gitignore --quiet",
"check:spelling": "yaspeller -c package.json --lang en --ignore-uppercase --ignore-roman-numerals --ignore-urls README.md",
"prepublishOnly": "yarn test",
"check": "yarn check:js && yarn check:spelling",
"test": "yarn build && echo 'Running tests...'; yarn check && yarn test:tape",
"test:tape": "postcss-tape --config .tape.cjs"
},
"dependencies": {
"browserslist": "^4.0.0",
"caniuse-lite": "^1.0.30000655"
},
"devDependencies": {
"@babel/core": "7.12.13",
"@babel/eslint-parser": "7.12.13",
"@babel/preset-env": "7.12.13",
"@rollup/plugin-babel": "5.2.3",
"ajv": "7.0.4",
"eslint": "7.19.0",
"eslint-config-dev": "2.0.0",
"postcss": "8.2.4",
"postcss-tape": "6.0.0",
"pre-commit": "1.2.2",
"rollup": "2.38.2",
"yaspeller": "7.0.0"
},
"peerDependencies": {
"postcss": "^8.0.0"
},
"eslintConfig": {
"extends": "dev",
"parser": "@babel/eslint-parser"
},
"keywords": [
"postcss",
"css",
"postcss-plugin",
"ua",
"user",
"agent",
"platform",
"system",
"ui",
"default",
"font",
"font-family",
"family",
"face",
"type",
"typeface"
],
"yaspeller": {
"dictionary": [
"Affirmer",
"Affirmer's",
"babili",
"babelrc",
"browserslist",
"caniuse",
"devDependencies",
"env",
"es6",
"esm",
"Gulpfile",
"greenkeeper",
"Gruntfile",
"istanbul",
"js",
"lockfile",
"npm",
"nyc",
"postcss",
"PostCSS",
"polyfill",
"Refactors",
"rollup",
"std",
"sublicensable",
"travis",
"ui",
"yaspeller",
"^[A-f0-9]{8}$"
]
}
}
<|start_filename|>test/basic.browser.chrome_60.not_preserve.expect.css<|end_filename|>
test {
font: italic bold 12px/30px system-ui, Helvetica Neue, sans-serif;
font: italic bold 12px/30px system-ui, sans-serif;
font: italic bold 12px/30px sans-serif;
font-family: system-ui, Helvetica Neue, sans-serif;
font-family: system-ui, sans-serif;
font-family: system-ui;
font-family: sans-serif;
font-family: "Droid Sans", system-ui, sans-serif;
}
| JLHwung/postcss-font-family-system-ui |
<|start_filename|>TestBigDecimal/TestBigDecimalFunctions.cs<|end_filename|>
using System;
using System.Numerics;
using ExtendedNumerics;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace TestBigDecimal
{
[TestClass]
public class TestBigDecimalFunctions
{
private TestContext m_testContext;
public TestContext TestContext { get { return m_testContext; } set { m_testContext = value; } }
[ClassInitialize()]
public static void Initialize(TestContext context)
{
BigDecimal.Precision = 5000;
BigDecimal.AlwaysTruncate = false;
}
[TestProperty("Basic", "Properties")]
[TestMethod]
public void TestGetLength()
{
BigDecimal expectedResult = BigDecimal.Parse("2268507702394854741827137539360680923314");
BigDecimal value = new BigDecimal(BigInteger.Parse("22685077023948547418271375393606809233149150201282920942551781108927727789384397020382853"), -49);
BigDecimal result = value.WholeValue;
Assert.AreEqual(expectedResult, result);
}
[TestProperty("Basic", "Properties")]
[TestMethod]
public void TestSignifigantDigits()
{
int expectedResult1 = 19;
int expectedResult2 = 9;
BigDecimal number1 = new BigDecimal(12345678901234567890, -10);
BigDecimal number2 = new BigDecimal(123456789, 1);
int result1 = number1.SignifigantDigits;
int result2 = number2.SignifigantDigits;
Assert.AreEqual(expectedResult1, result1);
Assert.AreEqual(expectedResult2, result2);
}
[TestProperty("Basic", "Properties")]
[TestMethod]
public void TestGetWholeValue()
{
BigDecimal expectedResult = BigDecimal.Parse("2268507702394854741827137539360680923314");
BigDecimal value = new BigDecimal(BigInteger.Parse("22685077023948547418271375393606809233149150201282920942551781108927727789384397020382853"), -49);
BigDecimal result = value.WholeValue;
Assert.AreEqual(expectedResult.ToString(), result.ToString());
}
[TestProperty("Basic", "Properties")]
[TestMethod]
public void TestGetFractionalPart()
{
BigDecimal expectedResult = new BigDecimal(BigInteger.Parse("9150201282920942551781108927727789384397020382853"), -49);
BigDecimal value = new BigDecimal(BigInteger.Parse("22685077023948547418271375393606809233149150201282920942551781108927727789384397020382853"), -49);
BigDecimal result = value.GetFractionalPart();
Assert.AreEqual<BigDecimal>(expectedResult, result);
}
[TestProperty("Basic", "Operations")]
[TestMethod]
public void TestRounding()
{
BigDecimal bd = BigDecimal.Parse("10000000000000000000000000000000000000000000000000001");
BigDecimal up = BigDecimal.Parse(0.50001);
BigDecimal down = BigDecimal.Parse(0.49);
BigDecimal oneAndAhalf = BigDecimal.Parse("1.5");
BigDecimal negEightPointFive = BigDecimal.Parse(-8.5);
BigDecimal negNinePointFive = -9.5d;
BigDecimal threePointFourNine = BigDecimal.Parse(3.49);
BigDecimal threePointFiveOne = BigDecimal.Parse(3.51);
BigDecimal sixPointFive = BigDecimal.Parse(6.5);
BigInteger one = BigDecimal.Round(up);
BigInteger zero = BigDecimal.Round(down);
BigInteger two = BigDecimal.Round(oneAndAhalf);
BigInteger three = BigDecimal.Round(threePointFourNine);
BigInteger four = BigDecimal.Round(threePointFiveOne);
BigInteger six = BigDecimal.Round(sixPointFive, MidpointRounding.ToEven);
BigInteger negEight = BigDecimal.Round(negEightPointFive, MidpointRounding.ToEven);
BigInteger negNine = BigDecimal.Round(negEightPointFive, MidpointRounding.AwayFromZero);
BigInteger negTen = BigDecimal.Round(negNinePointFive, MidpointRounding.ToEven);
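// MidpointRounding.ToEven rounds midpoints to the nearest even integer (6.5 -> 6, -8.5 -> -8, -9.5 -> -10), while AwayFromZero takes -8.5 to -9.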
Assert.AreEqual(BigInteger.One, one);
Assert.AreEqual(BigInteger.Zero, zero);
Assert.AreEqual(2, two);
Assert.AreEqual(3, three);
Assert.AreEqual(4, four);
Assert.AreEqual(6, six);
Assert.AreEqual(-8, negEight);
Assert.AreEqual(-9, negNine);
Assert.AreEqual(-10, negTen);
}
[TestProperty("Basic", "Properties")]
[TestMethod]
public void TestGetSign()
{
BigDecimal zero1 = 0;
BigDecimal zero2 = new BigDecimal();
BigDecimal zero3 = new BigDecimal(0);
BigDecimal zero4 = new BigDecimal(BigInteger.Zero);
BigDecimal zero5 = new BigDecimal(0, -1);
BigDecimal zero6 = BigInteger.Subtract(BigInteger.Add(BigInteger.Divide(2, 3), BigInteger.Multiply(-1, BigInteger.Divide(1, 3))), BigInteger.Divide(1, 3));
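// Note: BigInteger.Divide performs integer division, so 2/3 and 1/3 each evaluate to 0 here.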
BigDecimal oneTenth = BigDecimal.Divide(BigDecimal.One, new BigDecimal(10));
BigDecimal pointOne = 0.1d;
BigDecimal zero7 = BigDecimal.Subtract(oneTenth, pointOne);
BigDecimal zero8 = BigDecimal.Add((new BigDecimal(1, -1)), ((double)-1 / 10));
BigDecimal zero9 = (new BigDecimal(15274, -7) * 0);
BigDecimal positive1 = 1;
BigDecimal positive2 = -1 * -1;
BigDecimal negative1 = BigDecimal.Multiply(BigDecimal.One, BigDecimal.MinusOne);
BigDecimal negative2 = BigDecimal.Subtract(BigDecimal.Zero, 3);
BigDecimal negative3 = BigInteger.Subtract(0, 3);
BigDecimal negative4 = 10 * -1;
Assert.AreEqual(0, zero1.Sign, "0");
Assert.AreEqual(0, zero2.Sign, "new BigDecimal()");
Assert.AreEqual(0, zero3.Sign, "new BigDecimal(0);");
Assert.AreEqual(0, zero4.Sign, "new BigDecimal(BigInteger.Zero)");
Assert.AreEqual(0, zero5.Sign, "new BigDecimal(0, -1);");
Assert.AreEqual(0, zero6.Sign, "2/3 -1/3 - 1/3");
Assert.AreEqual(0, zero7.Sign, "1/10 - 1/10");
Assert.AreEqual(0, zero8.Sign, "1/10 + -1/10");
Assert.AreEqual(0, zero9.Sign, "0.0015274 * 0");
Assert.AreEqual(1, positive1.Sign, "1");
Assert.AreEqual(1, positive2.Sign, "-1 * 1");
Assert.AreEqual(BigInteger.MinusOne, negative1.Sign, "1 * -1");
Assert.AreEqual(BigInteger.MinusOne, negative2.Sign, "0 - 3");
Assert.AreEqual(BigInteger.MinusOne, negative3.Sign, "BigInteger.Subtract(0, 3)");
Assert.AreEqual(BigInteger.MinusOne, negative4.Sign, "10 * -1;");
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestLCD()
{
BigDecimal expectedResult = BigDecimal.Parse("45319990731015");
BigDecimal result = BigIntegerHelper.LCM(
new BigInteger[] {
3, 5, 7, 11, 13, 101, 307, 311, 313
});
// lcm(3, 5, 7, 11, 13) = 15015
// lcm(3, 5, 7, 11, 13, 101, 307, 311, 313) = 45319990731015
// lcm(4973, 4292, 4978, 4968, 4297, 4287) = 2822891742340306560
Assert.AreEqual(expectedResult, result);
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestGCD()
{
BigDecimal expectedResult = BigDecimal.Parse("10");
BigDecimal result = BigIntegerHelper.GCD( new BigInteger[] { 20, 30, 210, 310, 360, 5040, 720720 });
Assert.AreEqual(expectedResult, result);
}
[TestProperty("Basic", "Factory")]
[TestMethod]
public void TestIrrational001()
{
BigDecimal goldenRatio = BigDecimal.Parse("1.6180339887498948482045868343656381177203091798057628621354486227052604628189024497072072041893911374847540880753868917521266338622235369317931800607667263544333890865959395829056383226613199282902678806752087668925017116962070322210432162695486262963136144381497587012203408058879544547492461856953648644492");
TestContext.WriteLine("");
TestContext.WriteLine(goldenRatio.ToString());
TestContext.WriteLine("");
TestContext.WriteLine("");
}
[TestProperty("Basic", "Factory")]
[TestMethod]
public void TestIrrational002()
{
BigDecimal goldenRatio = BigDecimal.Parse("1.6180339887498948482045868343656381177203091798057628621354486227052604628189024497072072041893911374847540880753868917521266338622235369317931800607667263544333890865959395829056383226613199282902678806752087668925017116962070322210432162695486262963136144381497587012203408058879544547492461856953648644492");
TestContext.WriteLine("");
TestContext.WriteLine(goldenRatio.ToString());
TestContext.WriteLine("");
TestContext.WriteLine("");
}
}
}
<|start_filename|>BigDecimal/BigIntegerHelper.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Numerics;
using System.Text;
using System.Threading.Tasks;
namespace ExtendedNumerics
{
/// <summary>
/// Sqrt and NRoot acquired from http://mjs5.com/2016/01/20/c-biginteger-helper-constructors
/// </summary>
public static class BigIntegerHelper
{
public static IEnumerable<BigInteger> GetRange(BigInteger min, BigInteger max)
{
BigInteger counter = min;
while (counter < max)
{
yield return counter;
counter++;
}
yield break;
}
public static bool IsCoprime(BigInteger value1, BigInteger value2)
{
return GCD(value1, value2) == 1;
}
public static BigInteger GCD(IEnumerable<BigInteger> numbers)
{
return numbers.Aggregate(GCD);
}
public static BigInteger LCM(IEnumerable<BigInteger> numbers)
{
return numbers.Aggregate(LCM);
}
public static BigInteger LCM(BigInteger num1, BigInteger num2)
{
BigInteger absValue1 = BigInteger.Abs(num1);
BigInteger absValue2 = BigInteger.Abs(num2);
return (absValue1 * absValue2) / GCD(absValue1, absValue2);
}
public static BigInteger GCD(BigInteger value1, BigInteger value2)
{
BigInteger absValue1 = BigInteger.Abs(value1);
BigInteger absValue2 = BigInteger.Abs(value2);
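// Euclidean algorithm by repeated remainders: reduce the larger value modulo the smaller until one hits zero; the remaining nonzero value is the GCD.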
while (absValue1 != 0 && absValue2 != 0)
{
if (absValue1 > absValue2)
{
absValue1 %= absValue2;
}
else
{
absValue2 %= absValue1;
}
}
return BigInteger.Max(absValue1, absValue2);
}
public static BigInteger Clone(this BigInteger source)
{
return new BigInteger(source.ToByteArray());
}
public static int GetLength(this BigInteger source)
{
int result = 0;
BigInteger copy = source.Clone();
while (copy > 0)
{
copy /= 10;
result++;
}
return result;
}
public static BigInteger Square(this BigInteger input)
{
return input * input;
}
public static BigInteger SquareRoot(BigInteger input)
{
if (input.IsZero)
{
return new BigInteger(0);
}
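// Binary search for the integer square root: narrow [low, high] on |input| until high == low + 1, returning low unless an exact square is found.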
BigInteger n = new BigInteger(0);
BigInteger p = new BigInteger(0);
BigInteger low = new BigInteger(0);
BigInteger high = BigInteger.Abs(input);
while (high > low + 1)
{
n = (high + low) >> 1;
p = n * n;
if (input < p)
{
high = n;
}
else if (input > p)
{
low = n;
}
else
{
break;
}
}
return input == p ? n : low;
}
// Returns the Nth root of a BigInteger, with remainder.
// The root must be greater than or equal to 1 and the value must be a positive integer.
public static BigInteger NthRoot(this BigInteger value, int root, ref BigInteger remainder)
{
if (root < 1)
{
throw new Exception("root must be greater than or equal to 1");
}
if (value.Sign == -1)
{
throw new Exception("value must be a positive integer");
}
if (value == BigInteger.One)
{
remainder = 0;
return BigInteger.One;
}
if (value == BigInteger.Zero)
{
remainder = 0;
return BigInteger.Zero;
}
if (root == 1)
{
remainder = 0;
return value;
}
var upperbound = value;
var lowerbound = BigInteger.Zero;
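// Bisect on the candidate root until lowerbound^root <= value < (lowerbound + 1)^root.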
while (true)
{
var nval = (upperbound + lowerbound) >> 1;
var tstsq = BigInteger.Pow(nval, root);
if (tstsq > value)
{
upperbound = nval;
}
if (tstsq < value)
{
lowerbound = nval;
}
if (tstsq == value)
{
lowerbound = nval;
break;
}
if (lowerbound == upperbound - 1)
{
break;
}
}
remainder = value - BigInteger.Pow(lowerbound, root);
return lowerbound;
}
}
}
<|start_filename|>TestBigDecimal/TestBigDecimalConversion.cs<|end_filename|>
using System;
using System.Numerics;
using ExtendedNumerics;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace TestBigDecimal
{
[TestClass]
public class TestBigDecimalConversion
{
private TestContext m_testContext;
public TestContext TestContext { get { return m_testContext; } set { m_testContext = value; } }
[ClassInitialize()]
public static void Initialize(TestContext context)
{
BigDecimal.Precision = 5000;
BigDecimal.AlwaysTruncate = false;
}
[TestProperty("Basic", "Conversion")]
[TestMethod]
public void TestConversionFromBigInteger()
{
BigDecimal expectedResult = BigDecimal.Parse("22685077023948547418271375393606809233149150201282920942551781108927727789384397020382853");
BigDecimal result = (BigDecimal)BigInteger.Parse("22685077023948547418271375393606809233149150201282920942551781108927727789384397020382853");
Assert.AreEqual(expectedResult, result);
}
[TestProperty("Basic", "Conversion")]
[TestMethod]
public void TestConversionToBigInteger()
{
BigInteger expectedResult = BigInteger.Parse("213212221322233233332232232223");
BigInteger result = (BigInteger)BigDecimal.Parse("213212221322233233332232232223");
Assert.AreEqual(expectedResult, result);
}
}
}
<|start_filename|>TestBigDecimal/TestBigDecimalOperations.cs<|end_filename|>
using System;
using System.Numerics;
using ExtendedNumerics;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace TestBigDecimal
{
[TestClass]
public class TestBigDecimalOperations
{
private TestContext m_testContext;
public TestContext TestContext { get { return m_testContext; } set { m_testContext = value; } }
[ClassInitialize()]
public static void Initialize(TestContext context)
{
BigDecimal.Precision = 5000;
BigDecimal.AlwaysTruncate = false;
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestNegate()
{
string expected = "-1.375";
BigDecimal result = BigDecimal.Negate((double)(1.375));
Assert.AreEqual(expected, result.ToString());
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestAddition()
{
BigDecimal number1 = BigDecimal.Parse("1234567890");
BigDecimal expectedResult = BigDecimal.Parse("3382051537");
BigDecimal result = number1 + 2147483647;
Assert.AreEqual(expectedResult, result);
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestSubtraction()
{
BigDecimal number = BigDecimal.Parse("4294967295");
BigDecimal expectedResult = BigDecimal.Parse("2147483648");
BigDecimal result = number - 2147483647;
Assert.AreEqual(expectedResult, result);
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestMultiply()
{
BigDecimal expectedResult1 = BigDecimal.Parse("35794234179725868774991807832568455403003778024228226193532908190484670252364677411513516111204504060317568667");
BigDecimal expectedResult2 = BigDecimal.Parse("37484040009320200288159018961010536937973891182532366282540247408867702983313960194873589374267102044942786001");
BigDecimal expectedResult3 = new BigDecimal(BigInteger.Negate(BigInteger.Parse("61199804023616162130466158636504166524066189692091806226423722790866248079929810268920239053350152436663869784")));
//"6119980402361616213046615863650416652406618969209180622642372279086624807992.9810268920239053350152436663869784"
expectedResult3.Truncate();
expectedResult3.Normalize();
BigDecimal p = BigDecimal.Parse("6122421090493547576937037317561418841225758554253106999");
BigDecimal q = BigDecimal.Parse("5846418214406154678836553182979162384198610505601062333");
BigDecimal result1 = BigDecimal.Multiply(p, q);
BigDecimal result2 = p * p;
BigDecimal result3 = -1 * p * new BigDecimal(BigInteger.Parse("9996013524558575221488141657137307396757453940901242216"), -34);
// -1 * 6122421090493547576937037317561418841225758554253106999 * 999601352455857522148.8141657137307396757453940901242216
// = -6119980402361616213046615863650416652406618969209180622642372279086624807992.9810268920239053350152436663869784
// 9996013524558575221488141657137307396757453940901242216
bool matches1 = expectedResult1.Equals(result1);
bool matches2 = expectedResult2.Equals(result2);
bool matches3 = expectedResult3.ToString().Equals(result3.ToString().Replace(".", ""));
Assert.IsTrue(matches1);
Assert.IsTrue(matches2);
Assert.IsTrue(matches3);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide000()
{
BigDecimal expectedResult = BigDecimal.Parse("7");
BigDecimal dividend = BigDecimal.Parse("0.63");
BigDecimal divisor = BigDecimal.Parse("0.09");
BigDecimal result = BigDecimal.Divide(dividend, divisor);
string expected = expectedResult.ToString();
string actual = result.ToString();
Assert.AreEqual(expected, actual);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide001()
{
BigDecimal expectedResult = BigDecimal.Parse("40094690950920881030683735292761468389214899724061");
BigDecimal dividend = BigDecimal.Parse("1522605027922533360535618378132637429718068114961380688657908494580122963258952897654000350692006139");
BigDecimal divisor = BigDecimal.Parse("37975227936943673922808872755445627854565536638199");
BigDecimal result = BigDecimal.Divide(dividend, divisor);
string expected = expectedResult.ToString();
string actual = result.ToString();
Assert.AreEqual(expected, actual);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide002()
{
string expectedResultDividend = "0.001";
string expectedResultDivisor = "0.5";
string expectedQuotientResult = "0.002";
BigDecimal resultDividend = BigDecimal.Parse(expectedResultDividend);
BigDecimal resultDivisor = BigDecimal.Parse(expectedResultDivisor);
resultDividend.Normalize();
resultDivisor.Normalize();
BigDecimal quotientResult = BigDecimal.Divide(resultDividend, resultDivisor);
quotientResult.Normalize();
string actualDividend = resultDividend.ToString();
string actualDivisor = resultDivisor.ToString();
string actualQuotientResult = quotientResult.ToString();
Assert.AreEqual(expectedResultDividend, actualDividend);
Assert.AreEqual(expectedResultDivisor, actualDivisor);
Assert.AreEqual(expectedQuotientResult, actualQuotientResult);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide003()
{
string expected = "1.10367421348286";
BigDecimal divisor = BigDecimal.Parse("0.90606447789");
BigDecimal result = BigDecimal.Divide(BigDecimal.One, divisor);
result.Truncate(100);
string actual = result.ToString();
Assert.AreEqual(expected, actual);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide004()
{
BigDecimal expectedResult = BigDecimal.Parse("0.05");
BigDecimal one = new BigDecimal(1);
BigDecimal twenty = new BigDecimal(20);
BigDecimal result = BigDecimal.Divide(one, twenty);
string expected = expectedResult.ToString();
string actual = result.ToString();
Assert.AreEqual(expected, actual);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide005A()
{
BigDecimal expectedResult3 = BigDecimal.Parse("50");
BigDecimal result3 = BigDecimal.Divide(BigDecimal.Parse("0.5"), BigDecimal.Parse("0.01"));
string expected3 = expectedResult3.ToString();
string actual3 = result3.ToString();
Assert.AreEqual(expected3, actual3);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide005B()
{
BigDecimal expectedResult3 = BigDecimal.Parse("5");
BigDecimal result3 = BigDecimal.Divide(BigDecimal.Parse("0.5"), BigDecimal.Parse("0.1"));
string expected3 = expectedResult3.ToString();
string actual3 = result3.ToString();
Assert.AreEqual(expected3, actual3);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide005C()
{
BigDecimal expectedResult3 = BigDecimal.Parse("5");
BigDecimal result3 = BigDecimal.Divide(BigDecimal.Parse("0.05"), BigDecimal.Parse("0.01"));
string expected3 = expectedResult3.ToString();
string actual3 = result3.ToString();
Assert.AreEqual(expected3, actual3);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestDivide005D()
{
BigDecimal expectedResult3 = BigDecimal.Parse("0.5");
BigDecimal result3 = BigDecimal.Divide(BigDecimal.Parse("0.05"), BigDecimal.Parse("0.1"));
string expected3 = expectedResult3.ToString();
string actual3 = result3.ToString();
Assert.AreEqual(expected3, actual3);
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestReciprocal001()
{
// 1 / 3 = 0.333333333333333
BigDecimal expectedResult = BigDecimal.Parse("0.333333333333333");
BigDecimal dividend = new BigDecimal(1);
BigDecimal divisor = new BigDecimal(3);
BigDecimal result = BigDecimal.Divide(dividend, divisor);
string expected = expectedResult.ToString();
string actual = result.ToString();
Assert.AreEqual(expected, actual);
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestReciprocal003()
{
// 1/0.0833333333333333 == 12
BigDecimal expectedResult = BigDecimal.Parse("12");
BigDecimal dividend = new BigDecimal(1);
BigDecimal divisor = BigDecimal.Parse("0.0833333333333333");
BigDecimal result = BigDecimal.Divide(dividend, divisor);
string expected = expectedResult.ToString();
string actual = result.ToString();
Assert.AreEqual(expected, actual);
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestReciprocal004()
{
// 2/0.63661977236758 == 3.1415926535898
BigDecimal expectedResult = BigDecimal.Parse("3.14159265358970");
BigDecimal dividend = new BigDecimal(2);
BigDecimal divisor = BigDecimal.Parse("0.63661977236758");
BigDecimal result = BigDecimal.Divide(dividend, divisor);
string expected = expectedResult.ToString();
string actual = result.ToString();
Assert.AreEqual(expected, actual);
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestReciprocal002()
{
// 1/2 = 0.5
BigDecimal expectedResult = BigDecimal.Parse("0.5");
BigDecimal dividend = new BigDecimal(1);
BigDecimal divisor = new BigDecimal(2);
BigDecimal result = BigDecimal.Divide(dividend, divisor);
string expected = expectedResult.ToString();
string actual = result.ToString();
Assert.AreEqual(expected, actual);
}
//[Ignore]
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestMod()
{
BigDecimal expectedResult1 = 12;
BigDecimal expectedResult2 = 0;
BigDecimal expectedResult3 = 1;
//BigDecimal expectedResult4 = 1.66672;
// 31 % 19 = 12
BigDecimal dividend1 = 31;
BigDecimal divisor1 = 19;
// 1891 %31 = 0
BigDecimal dividend2 = 1891;
BigDecimal divisor2 = 31;
// 6661 % 60 = 1
BigDecimal dividend3 = 6661;
BigDecimal divisor3 = 60;
// 31 % 3.66666 = 1.66672
//BigDecimal dividend4 = 31;
//BigDecimal divisor4 = 3.66666;
BigDecimal result1 = BigDecimal.Mod(dividend1, divisor1);
BigDecimal result2 = BigDecimal.Mod(dividend2, divisor2);
BigDecimal result3 = BigDecimal.Mod(dividend3, divisor3);
//BigDecimal result4 = BigDecimal.Mod(dividend4,divisor4);
Assert.AreEqual(expectedResult1, result1);
Assert.AreEqual(expectedResult2, result2);
Assert.AreEqual(expectedResult3, result3);
//Assert.AreEqual(expectedResult4, result4);
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestCeiling001()
{
string expectedCeiling = "4";
string expectedStart = "3.14159265";
BigDecimal start = BigDecimal.Parse(expectedStart);
string actualStart = start.ToString();
Assert.AreEqual(expectedStart, actualStart);
BigDecimal ceiling = BigDecimal.Ceiling(start);
string actualCeiling = ceiling.ToString();
Assert.AreEqual(expectedCeiling, actualCeiling, $"ceiling({expectedStart}) == {expectedCeiling}");
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestCeiling002()
{
string expectedCeiling = "-3";
string expectedStart = "-3.14159265";
BigDecimal start = BigDecimal.Parse(expectedStart);
string actualStart = start.ToString();
Assert.AreEqual(expectedStart, actualStart);
BigDecimal ceiling = BigDecimal.Ceiling(start);
string actualCeiling = ceiling.ToString();
Assert.AreEqual(expectedCeiling, actualCeiling, $"ceiling({expectedStart}) == {expectedCeiling}");
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestCeiling003()
{
string expectedCeiling = "1";
string expectedStart = "0.14159265";
BigDecimal start = BigDecimal.Parse(expectedStart);
string actualStart = start.ToString();
Assert.AreEqual(expectedStart, actualStart);
BigDecimal ceiling = BigDecimal.Ceiling(start);
string actualCeiling = ceiling.ToString();
Assert.AreEqual(expectedCeiling, actualCeiling, $"ceiling({expectedStart}) == {expectedCeiling}");
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestCeiling004()
{
string expectedCeiling = "0";
string expectedStart = "-0.14159265";
BigDecimal start = BigDecimal.Parse(expectedStart);
string actualStart = start.ToString();
Assert.AreEqual(expectedStart, actualStart);
BigDecimal ceiling = BigDecimal.Ceiling(start);
string actualCeiling = ceiling.ToString();
Assert.AreEqual(expectedCeiling, actualCeiling, $"ceiling({expectedStart}) == {expectedCeiling}");
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestFloor001()
{
string expectedFloor = "3";
string expectedStart = "3.14159265";
BigDecimal start = BigDecimal.Parse(expectedStart);
string actualStart = start.ToString();
Assert.AreEqual(expectedStart, actualStart);
BigDecimal floor = BigDecimal.Floor(start);
string actualFloor = floor.ToString();
Assert.AreEqual(expectedFloor, actualFloor, $"ceiling({expectedStart}) == {expectedFloor}");
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestFloor002()
{
string expectedFloor = "-4";
string expectedStart = "-3.14159265";
BigDecimal start = BigDecimal.Parse(expectedStart);
string actualStart = start.ToString();
Assert.AreEqual(expectedStart, actualStart);
BigDecimal floor = BigDecimal.Floor(start);
string actualFloor = floor.ToString();
Assert.AreEqual(expectedFloor, actualFloor, $"ceiling({expectedStart}) == {expectedFloor}");
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestFloor003()
{
string expectedFloor = "-1";
string expectedStart = "-0.14159265";
BigDecimal start = BigDecimal.Parse(expectedStart);
string actualStart = start.ToString();
Assert.AreEqual(expectedStart, actualStart);
BigDecimal floor = BigDecimal.Floor(start);
string actualFloor = floor.ToString();
Assert.AreEqual(expectedFloor, actualFloor, $"ceiling({expectedStart}) == {expectedFloor}");
}
[TestProperty("Arithmetic", "Divide")]
[TestMethod]
public void TestFloor004()
{
string expectedFloor = "0";
string expectedStart = "0.14159265";
BigDecimal start = BigDecimal.Parse(expectedStart);
string actualStart = start.ToString();
Assert.AreEqual(expectedStart, actualStart);
BigDecimal floor = BigDecimal.Floor(start);
string actualFloor = floor.ToString();
Assert.AreEqual(expectedFloor, actualFloor, $"ceiling({expectedStart}) == {expectedFloor}");
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestBigDecimalPow()
{
BigDecimal expectedResult = BigDecimal.Parse("268637376395543538746286686601216000000000000");
// 5040 ^ 12 = 268637376395543538746286686601216000000000000
BigDecimal number = BigDecimal.Parse("5040");
BigDecimal result = BigDecimal.Pow(number, 12);
Assert.AreEqual(expectedResult, result, "5040 ^ 12 = 268637376395543538746286686601216000000000000");
}
[TestProperty("Arithmetic", "Operations")]
[TestMethod]
public void TestSqrt()
{
BigInteger expectedResult = BigInteger.Parse("8145408529");
// sqrt(66347680104305943841) = 8145408529
BigInteger squareNumber = BigInteger.Parse("66347680104305943841");
BigInteger remainder = new BigInteger();
BigInteger result = squareNumber.NthRoot(2, ref remainder);
Assert.AreEqual(expectedResult, result, "sqrt(66347680104305943841) = 8145408529");
}
}
}
| AdamWhiteHat/BigDecimal |
<|start_filename|>src/sampler.cpp<|end_filename|>
/*
Sampler
Copyright (c) 2019-2020, <NAME> and <NAME>. All rights reserved
Copyright (c) 2009-2018, <NAME>. All rights reserved.
Copyright (c) 2015, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME>
Copyright (c) 2014, <NAME>, <NAME>, <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#include <ctime>
#include <cstring>
#include <errno.h>
#include <algorithm>
#include <string.h>
#include <sstream>
#include <iostream>
#include <iomanip>
#include <map>
#include <set>
#include <fstream>
#include <sys/stat.h>
#include <string.h>
#include <list>
#include <array>
#include <cmath>
#include <complex>
#include "time_mem.h"
#include "cryptominisat5/cryptominisat.h"
#include "cryptominisat5/solvertypesmini.h"
#include "GitSHA1.h"
#include "sampler.h"
using std::cout;
using std::cerr;
using std::endl;
using std::list;
using std::map;
using ApproxMC::SolCount;
Hash Sampler::add_hash(uint32_t hash_index)
{
const string randomBits =
gen_rnd_bits(appmc->get_sampling_set().size(), hash_index);
vector<uint32_t> vars;
for (uint32_t j = 0; j < appmc->get_sampling_set().size(); j++) {
if (randomBits[j] == '1') {
vars.push_back(appmc->get_sampling_set()[j]);
}
}
solver->new_var();
const uint32_t act_var = solver->nVars()-1;
const bool rhs = gen_rhs();
Hash h(act_var, vars, rhs);
vars.push_back(act_var);
solver->add_xor_clause(vars, rhs);
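// Because act_var is part of the XOR, assuming act_var = false (done in
// set_num_hashes via Lit(act_var, true)) reduces it to XOR(vars) == rhs,
// while leaving act_var free lets the solver satisfy the XOR either way,
// effectively switching the hash off.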
if (conf.verb_banning_cls) {
print_xor(vars, rhs);
}
return h;
}
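// Adds a clause that blocks 'model' (projected onto the sampling set) whenever
// act_var is assumed false: the clause is (act_var OR at least one sampling
// variable differing from the model).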
void Sampler::ban_one(const uint32_t act_var, const vector<lbool>& model)
{
vector<Lit> lits;
lits.push_back(Lit(act_var, false));
for (const uint32_t var: appmc->get_sampling_set()) {
lits.push_back(Lit(var, model[var] == l_True));
}
solver->add_clause(lits);
}
/// Adds banning clauses for solutions we have already seen (repeat solutions)
uint64_t Sampler::add_glob_banning_cls(
const HashesModels* hm
, const uint32_t act_var
, const uint32_t num_hashes)
{
if (hm == NULL)
return 0;
assert(act_var != std::numeric_limits<uint32_t>::max());
assert(num_hashes != std::numeric_limits<uint32_t>::max());
uint64_t repeat = 0;
vector<Lit> lits;
for (uint32_t i = 0; i < hm->glob_model.size(); i++) {
const SavedModel& sm = hm->glob_model[i];
//Model was generated with 'sm.hash_num' active
//We will have 'num_hashes' hashes active
if (sm.hash_num >= num_hashes) {
ban_one(act_var, sm.model);
repeat++;
} else if ((int)num_hashes - (int)sm.hash_num < 9) {
//Model has to fit all hashes
bool ok = true;
uint32_t checked = 0;
for(const auto& h: hm->hashes) {
//This hash is number: h.first
//Only has to match hashes below current need
//note that "h.first" is numbered from 0, so this is a "<" not "<="
if (h.first < num_hashes) {
checked++;
ok &= check_model_against_hash(h.second, sm.model);
if (!ok) break;
}
}
if (ok) {
//cout << "Found repeat model, had to check " << checked << " hashes" << endl;
ban_one(act_var, sm.model);
repeat++;
}
}
}
return repeat;
}
SolNum Sampler::bounded_sol_count(
uint32_t maxSolutions,
const vector<Lit>* assumps,
const uint32_t hashCount,
uint32_t minSolutions,
HashesModels* hm,
vector<vector<int>>* out_solutions
) {
if (conf.verb) {
cout << "c [unig] "
"[ " << std::setw(7) << std::setprecision(2) << std::fixed
<< (cpuTimeTotal()-startTime)
<< " ]"
<< " bounded_sol_count looking for " << std::setw(4) << maxSolutions << " solutions"
<< " -- hashes active: " << hashCount << endl;
}
//Will we need to extend the solution?
bool only_indep_sol = true;
if (out_solutions != NULL) {
only_indep_sol = conf.only_indep_samples;
}
//Turn off improvement from ApproxMC4 research paper
if (conf.force_sol_extension) {
only_indep_sol = false;
}
//Set up things for adding clauses that can later be removed
vector<Lit> new_assumps;
if (assumps) {
assert(assumps->size() == hashCount);
new_assumps = *assumps;
} else {
assert(hashCount == 0);
}
solver->new_var();
const uint32_t sol_ban_var = solver->nVars()-1;
new_assumps.push_back(Lit(sol_ban_var, true));
if (appmc->get_simplify() >= 2) {
if (conf.verb >= 2) {
cout << "c [unig] inter-simplifying" << endl;
}
double myTime = cpuTime();
solver->simplify(&new_assumps);
solver->set_verbosity(0);
total_inter_simp_time += cpuTime() - myTime;
if (conf.verb >= 1) {
cout << "c [unig] inter-simp finished, total simp time: "
<< total_inter_simp_time << endl;
}
}
const uint64_t repeat = add_glob_banning_cls(hm, sol_ban_var, hashCount);
uint64_t solutions = repeat;
double last_found_time = cpuTimeTotal();
vector<vector<lbool>> models;
while (solutions < maxSolutions) {
lbool ret = solver->solve(&new_assumps, only_indep_sol);
//COZ_PROGRESS_NAMED("one solution")
assert(ret == l_False || ret == l_True);
if (conf.verb >= 2) {
cout << "c [unig] bounded_sol_count ret: " << std::setw(7) << ret;
if (ret == l_True) {
cout << " sol no. " << std::setw(3) << solutions;
} else {
cout << " No more. " << std::setw(3) << "";
}
cout << " T: "
<< std::setw(7) << std::setprecision(2) << std::fixed
<< (cpuTimeTotal()-startTime)
<< " -- hashes act: " << hashCount
<< " -- T since last: "
<< std::setw(7) << std::setprecision(2) << std::fixed
<< (cpuTimeTotal()-last_found_time)
<< endl;
if (conf.verb >= 3) {
solver->print_stats();
}
last_found_time = cpuTimeTotal();
}
if (ret != l_True) {
break;
}
//Add solution to set
solutions++;
const vector<lbool> model = solver->get_model();
//#ifdef SLOW_DEBUG
check_model(model, hm, hashCount);
//#endif
models.push_back(model);
if (out_solutions) {
out_solutions->push_back(get_solution_ints(model));
}
//ban solution
vector<Lit> lits;
lits.push_back(Lit(sol_ban_var, false));
for (const uint32_t var: appmc->get_sampling_set()) {
assert(solver->get_model()[var] != l_Undef);
lits.push_back(Lit(var, solver->get_model()[var] == l_True));
}
if (conf.verb_banning_cls) {
cout << "c [unig] Adding banning clause: " << lits << endl;
}
solver->add_clause(lits);
}
if (solutions < maxSolutions) {
//Sampling -- output a random sample of N solutions
if (solutions >= minSolutions) {
assert(minSolutions > 0);
vector<size_t> modelIndices;
for (uint32_t i = 0; i < models.size(); i++) {
modelIndices.push_back(i);
}
std::shuffle(modelIndices.begin(), modelIndices.end(), randomEngine);
for (uint32_t i = 0; i < sols_to_return(solutions); i++) {
const auto& model = models.at(modelIndices.at(i));
(*callback_func)(get_solution_ints(model), callback_func_data);
}
}
}
//Save global models
if (hm && appmc->get_reuse_models()) {
for (const auto& model: models) {
hm->glob_model.push_back(SavedModel(hashCount, model));
}
}
//Remove solution banning
vector<Lit> cl_that_removes;
cl_that_removes.push_back(Lit(sol_ban_var, false));
solver->add_clause(cl_that_removes);
return SolNum(solutions, repeat);
}
void Sampler::sample(
Config _conf,
const ApproxMC::SolCount solCount,
const uint32_t num_samples)
{
conf = _conf;
solver = appmc->get_solver();
orig_num_vars = solver->nVars();
startTime = cpuTimeTotal();
openLogFile();
randomEngine.seed(appmc->get_seed());
if (conf.startiter > appmc->get_sampling_set().size()) {
cerr << "ERROR: Manually-specified startiter for gen_n_samples"
"is larger than the size of the independent set.\n" << endl;
exit(-1);
}
/* Compute threshold via formula from TACAS-15 paper */
threshold_Samplergen = ceil(4.03 * (1 + (1/conf.kappa)) * (1 + (1/conf.kappa)));
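// For example, with kappa = 0.638 (the value suggested in the UniGen papers;
// treat the exact default as an assumption here) this gives
// ceil(4.03 * (1 + 1/0.638)^2) = ceil(26.56) = 27.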
//No startiter, we have to figure it out
assert(conf.startiter == 0);
if (solCount.hashCount == 0 && solCount.cellSolCount == 0) {
cout << "c [unig] The input formula is unsatisfiable." << endl;
exit(-1);
}
double si = round(solCount.hashCount + log2(solCount.cellSolCount)
+ log2(1.8) - log2(threshold_Samplergen)) - 2;
if (si > 0) {
conf.startiter = si;
} else {
conf.startiter = 0; /* Indicate ideal sampling case */
}
generate_samples(num_samples);
}
vector<Lit> Sampler::set_num_hashes(
uint32_t num_wanted,
map<uint64_t, Hash>& hashes
) {
vector<Lit> assumps;
for(uint32_t i = 0; i < num_wanted; i++) {
if (hashes.find(i) != hashes.end()) {
assumps.push_back(Lit(hashes[i].act_var, true));
} else {
Hash h = add_hash(i);
assumps.push_back(Lit(h.act_var, true));
hashes[i] = h;
}
}
assert(num_wanted == assumps.size());
return assumps;
}
void Sampler::simplify()
{
if (conf.verb >= 1) {
cout << "c [unig] simplifying" << endl;
}
solver->set_sls(1);
solver->set_intree_probe(1);
solver->set_full_bve_iter_ratio(appmc->get_var_elim_ratio());
solver->set_full_bve(1);
solver->set_bva(1);
solver->set_distill(1);
solver->set_scc(1);
solver->simplify();
solver->set_sls(0);
solver->set_intree_probe(0);
solver->set_full_bve(0);
solver->set_bva(0);
solver->set_distill(0);
//solver->set_scc(0);
}
void Sampler::generate_samples(const uint32_t num_samples_needed)
{
double genStartTime = cpuTimeTotal();
hiThresh = ceil(1 + (1.4142136 * (1 + conf.kappa) * threshold_Samplergen));
loThresh = floor(threshold_Samplergen / (1.4142136 * (1 + conf.kappa)));
const uint32_t samplesPerCall = sols_to_return(num_samples_needed);
const uint32_t callsNeeded =
num_samples_needed / samplesPerCall + (bool)(num_samples_needed % samplesPerCall);
if (conf.verb) {
cout << "c [unig] Samples requested: " << num_samples_needed << endl;
cout << "c [unig] samples per XOR set:" << samplesPerCall << endl;
//cout << "c [unig] -> calls needed: " << callsNeeded << endl;
}
//TODO WARNING what is this 14???????????????????
uint32_t callsPerLoop = std::min(solver->nVars() / 14, callsNeeded);
callsPerLoop = std::max(callsPerLoop, 1U);
//cout << "c [unig] callsPerLoop:" << callsPerLoop << endl;
if (conf.verb) {
cout << "c [unig] starting sample generation."
<< " loThresh: " << loThresh
<< ", hiThresh: " << hiThresh
<< ", startiter: " << conf.startiter << endl;
}
uint32_t samples = 0;
if (conf.startiter > 0) {
uint32_t lastSuccessfulHashOffset = 0;
while(samples < num_samples_needed) {
samples += gen_n_samples(
callsPerLoop,
&lastSuccessfulHashOffset,
num_samples_needed);
}
} else {
/* Ideal sampling case; enumerate all solutions */
vector<vector<int> > out_solutions;
const uint32_t count = bounded_sol_count(
std::numeric_limits<uint32_t>::max() //max no. solutions
, NULL //assumps is empty
, 0 //number of hashes (information only)
, 1 //min num. solutions
, NULL //gobal model (would be banned)
, &out_solutions
).solutions;
assert(count > 0);
std::uniform_int_distribution<uint32_t> uid {0, count-1};
for (uint32_t i = 0; i < num_samples_needed; ++i) {
// out_solutions is a vector, so pick a uniformly random solution
// by index rather than advancing an iterator element by element.
samples++;
(*callback_func)(out_solutions[uid(randomEngine)], callback_func_data);
}
}
cout
<< "c [unig] Time to sample: "
<< cpuTimeTotal() - genStartTime
<< " s"
<< " -- Time count+samples: " << cpuTimeTotal() << " s"
<< endl;
cout << "c [unig] Samples generated: " << samples << endl;
}
uint32_t Sampler::gen_n_samples(
const uint32_t num_calls
, uint32_t* lastSuccessfulHashOffset
, const uint32_t num_samples_needed)
{
SparseData sparse_data(-1);
uint32_t num_samples = 0;
uint32_t i = 0;
while(i < num_calls) {
uint32_t hashOffsets[3];
hashOffsets[0] = *lastSuccessfulHashOffset;
//Specific values
if (hashOffsets[0] == 0) { // Starting at q-2; go to q-1 then q
hashOffsets[1] = 1;
hashOffsets[2] = 2;
}
if (hashOffsets[0] == 2) { // Starting at q; go to q-1 then q-2
hashOffsets[1] = 1;
hashOffsets[2] = 0;
}
map<uint64_t, Hash> hashes;
bool ok = false;
for (uint32_t j = 0; j < 3; j++) {
uint32_t currentHashOffset = hashOffsets[j];
uint32_t currentHashCount = currentHashOffset + conf.startiter;
const vector<Lit> assumps = set_num_hashes(currentHashCount, hashes);
double myTime = cpuTime();
const uint64_t solutionCount = bounded_sol_count(
hiThresh // max num solutions
, &assumps //assumptions to use
, currentHashCount
, loThresh //min number of solutions (samples not output otherwise)
).solutions;
ok = (solutionCount < hiThresh && solutionCount >= loThresh);
write_log(
true, //sampling
i, currentHashCount, solutionCount == hiThresh,
solutionCount, 0, cpuTime()-myTime);
if (ok) {
num_samples += sols_to_return(num_samples_needed);
*lastSuccessfulHashOffset = currentHashOffset;
break;
}
// Number of solutions too small or too large
// At q-1, and need to pick next hash count
if (j == 0 && currentHashOffset == 1) {
if (solutionCount < loThresh) {
// Go to q-2; next will be q
hashOffsets[1] = 0;
hashOffsets[2] = 2;
} else {
// Go to q; next will be q-2
hashOffsets[1] = 2;
hashOffsets[2] = 0;
}
}
}
if (ok) {
i++;
}
if (appmc->get_simplify() >= 1) {
simplify();
}
}
return num_samples;
}
////////////////////
//Helper functions
////////////////////
vector<int> Sampler::get_solution_ints(const vector<lbool>& model)
{
vector<int> solution;
if (conf.only_indep_samples) {
for (uint32_t j = 0; j < appmc->get_sampling_set().size(); j++) {
uint32_t var = appmc->get_sampling_set()[j];
assert(model[var] != l_Undef);
solution.push_back(((model[var] != l_True) ? -1: 1) * ((int)var + 1));
}
} else {
for(uint32_t var = 0; var < orig_num_vars; var++) {
assert(model[var] != l_Undef);
solution.push_back(((model[var] != l_True) ? -1: 1) * ((int)var + 1));
}
}
return solution;
}
bool Sampler::gen_rhs()
{
std::uniform_int_distribution<uint32_t> dist{0, 1};
bool rhs = dist(randomEngine);
//cout << "rnd rhs:" << (int)rhs << endl;
return rhs;
}
string Sampler::gen_rnd_bits(
const uint32_t size,
const uint32_t /*hash_index*/)
{
string randomBits;
// 1000 equally likely values against a cutoff of 500 gives an exact 1/2
// probability per bit (a {0, 1000} range would span 1001 values and skew it slightly).
std::uniform_int_distribution<uint32_t> dist{0, 999};
uint32_t cutoff = 500;
while (randomBits.size() < size) {
bool val = dist(randomEngine) < cutoff;
randomBits += '0' + val;
}
assert(randomBits.size() >= size);
//cout << "rnd bits: " << randomBits << endl;
return randomBits;
}
void Sampler::print_xor(const vector<uint32_t>& vars, const uint32_t rhs)
{
cout << "c [unig] Added XOR ";
for (size_t i = 0; i < vars.size(); i++) {
cout << vars[i]+1;
if (i < vars.size()-1) {
cout << " + ";
}
}
cout << " = " << (rhs ? "True" : "False") << endl;
}
void printVersionInfoSampler()
{
cout << "c Sampler SHA revision " << ::get_version_sha1() << endl;
cout << "c Sampler version " << ::get_version_tag() << endl;
cout << "c Sampler compilation env " << ::get_compilation_env() << endl;
#ifdef __GNUC__
cout << "c Sampler compiled with gcc version " << __VERSION__ << endl;
#else
cout << "c Sampler compiled with non-gcc compiler" << endl;
#endif
}
/* Number of solutions to return from one invocation of gen_n_samples. */
uint32_t Sampler::sols_to_return(uint32_t numSolutions)
{
if (conf.startiter == 0) // TODO improve hack for ideal sampling case?
return numSolutions;
else if (conf.multisample)
return loThresh;
else
return 1;
}
void Sampler::openLogFile()
{
if (conf.logfile) {
*conf.logfile << std::left
<< std::setw(5) << "sampl"
<< " " << std::setw(4) << "iter"
<< " " << std::setw(4) << "hash"
<< " " << std::setw(4) << "full"
<< " " << std::setw(4) << "sols"
<< " " << std::setw(4) << "rep"
<< " " << std::setw(7) << "T"
<< " " << std::setw(7) << "total T"
<< endl;
}
}
void Sampler::write_log(
bool sampling,
int iter,
uint32_t hashCount,
int found_full,
uint32_t num_sols,
uint32_t repeat_sols,
double used_time
)
{
if (conf.logfile) {
*conf.logfile
<< std::left
<< std::setw(5) << (int)sampling
<< " " << std::setw(4) << iter
<< " " << std::setw(4) << hashCount
<< " " << std::setw(4) << found_full
<< " " << std::setw(4) << num_sols
<< " " << std::setw(4) << repeat_sols
<< " " << std::setw(7) << std::fixed << std::setprecision(2) << used_time
<< " " << std::setw(7) << std::fixed << std::setprecision(2) << (cpuTimeTotal() - startTime)
<< endl;
}
}
void Sampler::check_model(
const vector<lbool>& model,
const HashesModels* const hm,
const uint32_t hashCount
)
{
for(uint32_t var: appmc->get_sampling_set()) {
assert(model[var] != l_Undef);
}
if (!hm)
return;
uint32_t checked = 0;
bool ok = true;
for(const auto& h: hm->hashes) {
//This hash is number: h.first
//Only has to match hashes at & below
//Notice that "h.first" is numbered from 0, so it's a "<" not "<="
if (h.first < hashCount) {
//cout << "Checking model against hash" << h.first << endl;
checked++;
ok &= check_model_against_hash(h.second, model);
if (!ok) break;
}
}
assert(ok);
}
bool Sampler::check_model_against_hash(const Hash& h, const vector<lbool>& model)
{
bool rhs = h.rhs;
for (const uint32_t var: h.hash_vars) {
assert(model[var] != l_Undef);
rhs ^= model[var] == l_True;
}
//The hash XOR(vars) == h.rhs holds exactly when the running parity above
//ends up FALSE. E.g. starting with rhs=FALSE and XOR-ing in only FALSE
//values leaves rhs FALSE -- satisfied, so we should return TRUE. Starting
//with rhs=TRUE and XOR-ing in exactly one TRUE likewise leaves rhs FALSE.
return !rhs;
}
string unigen_version_info()
{
std::stringstream ss;
ss << "c UniGen SHA revision " << ::get_version_sha1() << endl;
ss << "c UniGen version " << ::get_version_tag() << endl;
ss << "c UniGen compilation env " << ::get_compilation_env() << endl;
#ifdef __GNUC__
ss << "c UniGen compiled with gcc version " << __VERSION__ << endl;
#else
ss << "c UniGen compiled with non-gcc compiler" << endl;
#endif
return ss.str();
}
string Sampler::get_version_info() const
{
string ret = unigen_version_info();
ret += appmc->get_version_info();
return ret;
}
| meelgroup/unigen |
<|start_filename|>package.json<|end_filename|>
{
"name": "project-forecaster",
"version": "1.0.0",
"description": "Forecast probabilities of a project's effort and duration using Monte Carlo simulations",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "npm install && npm run copy-all && npm run webapp",
"copy-all": "npm run copy-chart && npm run copy-jquery && npm run copy-bootstrap && npm run copy-moment",
"copy-chart": "cp node_modules/chart.js/dist/Chart.bundle.min.js node_modules/chart.js/dist/Chart.min.css node_modules/chartjs-plugin-annotation/chartjs-plugin-annotation.min.js ./",
"copy-jquery": "cp node_modules/jquery/dist/jquery.min.js ./",
"copy-bootstrap": "cp node_modules/bootstrap/dist/css/bootstrap.min.css node_modules/bootstrap/dist/js/bootstrap.bundle.min.js ./",
"copy-moment": "cp node_modules/moment/min/moment.min.js ./",
"webapp": "rm -rf webapp; mkdir webapp && cp *.html *.css *.js *.png webapp/"
},
"author": "<NAME>",
"license": "MIT",
"repository": {
"type": "git",
"url": "git+https://github.com/rodrigozr/ProjectForecaster.git"
},
"dependencies": {
"bootstrap": "^4.5.0",
"chart.js": "^2.9.3",
"chartjs-plugin-annotation": "^0.5.7",
"jquery": "^3.5.1",
"moment": "^2.27.0"
}
}
<|start_filename|>ui.js<|end_filename|>
$(window).on("load", function () {
$('[data-toggle="tooltip"]').tooltip({ delay: 500 });
function parseSamples(selector) {
let val = $(selector).val() || '';
if (val.trim().length === 0) return [];
return val.split(/[\s\n,]/).map(s => s.trim().length > 0 ? Number(s.trim()) : NaN).filter(n => !Number.isNaN(n)).filter(n => n >= 0);
}
function parseRisks(selector) {
const risks = [];
$(selector).find('tbody').find('.risk-row').each((_index, el) => {
const $el = $(el);
const risk = {
likelihood: $el.find("input[name='likelihood']").val(),
lowImpact: $el.find("input[name='lowImpact']").val(),
highImpact: $el.find("input[name='highImpact']").val(),
description: $el.find("input[name='description']").val(),
};
if (risk.likelihood && (risk.lowImpact || risk.highImpact)) {
if (!risk.lowImpact) risk.lowImpact = '1';
else if (!risk.highImpact) risk.highImpact = risk.lowImpact;
risk.likelihood = parseInt(risk.likelihood) || 0;
risk.lowImpact = parseInt(risk.lowImpact) || 0;
risk.highImpact = parseInt(risk.highImpact) || 0;
risks.push(risk);
}
});
return risks;
}
const $riskRowTemplate = $('#risk-row-template').clone();
function addRisk() {
const $row = $riskRowTemplate.clone();
$row.insertBefore($('#add-risk-row'));
return $row;
}
function fillRisk(risk, $row) {
$row.find("input[name='likelihood']").val(risk.likelihood);
$row.find("input[name='lowImpact']").val(risk.lowImpact);
$row.find("input[name='highImpact']").val(risk.highImpact);
$row.find("input[name='description']").val(risk.description);
}
const $probabilitiesRowTemplate = $('#probabilities').find('.probabilities-row').clone();
function addProbabilityRow() {
const $row = $probabilitiesRowTemplate.clone();
$row.insertBefore('#show-more-row');
return $row;
}
function clearProbabilities() {
$('.probabilities-row').remove();
}
function share() {
if (readSimulationData()) {
navigator.clipboard.writeText(location.href);
$('#share').popover('show');
setTimeout(() => $('#share').popover('dispose'), 5000);
}
}
let currentlyLoadedHash = null;
function readSimulationData() {
const simulationData = {
projectName: $('#projectName').val(),
numberOfSimulations: parseInt($('#numberOfSimulations').val()),
confidenceLevel: parseInt($('#confidenceLevel').val()) || 85,
tpSamples: parseSamples('#tpSamples'),
ltSamples: parseSamples('#ltSamples'),
splitRateSamples: parseSamples('#splitRateSamples'),
risks: parseRisks('#risks'),
numberOfTasks: parseInt($('#numberOfTasks').val()),
totalContributors: parseInt($('#totalContributors').val()),
minContributors: parseInt($('#minContributors').val()),
maxContributors: parseInt($('#maxContributors').val()),
sCurveSize: parseInt($('#sCurveSize').val()),
startDate: $('#startDate').val() || undefined
};
if (!simulationData.tpSamples.some(n => n >= 1)) {
alert("Must have at least one weekly throughput sample greater than zero");
return false;
}
if (simulationData.splitRateSamples.length > 0 && simulationData.splitRateSamples.some(n => n > 10 || n < 0.2)) {
alert("Your split rates don't seem correct.\nFor a 10% split rate in a project, you should put '1.1', for example. Please correct before proceeding");
return false;
}
simulationData.minContributors = simulationData.minContributors || simulationData.totalContributors;
simulationData.maxContributors = simulationData.maxContributors || simulationData.totalContributors;
const hash = '#' + btoa(JSON.stringify(simulationData));
currentlyLoadedHash = hash;
location.hash = hash;
return simulationData;
}
function runSimulation() {
const simulationData = readSimulationData();
if (!simulationData) return;
loadDataFromUrl();
$('#results-main').show();
const $results = $('#results');
$results.val('');
const write = str => $results.val($results.val() + str);
$('#res-effort').val('Running...');
setTimeout(() => {
// Run the simulation
const startTime = Date.now();
const result = runMonteCarloSimulation(simulationData);
const elapsed = Date.now() - startTime;
$results.val('');
// Report the results
const confidenceLevel = simulationData.confidenceLevel;
const reportPercentile = confidenceLevel / 100;
const effort = Math.round(percentile(result.simulations.map(s => s.effortWeeks), reportPercentile, true));
const duration = Math.round(percentile(result.simulations.map(s => s.durationInCalendarWeeks), reportPercentile, true));
$('#res-summary-header').text(`Project forecast summary (with ${confidenceLevel}% of confidence):`);
$('#res-effort').val(effort);
$('#res-duration').val(duration);
let endDate = '(No start date set)';
if (simulationData.startDate) {
endDate = moment(simulationData.startDate).add(duration, 'weeks').format("MMM Do YYYY");
}
$('#res-endDate').val(endDate);
// Probabilities
clearProbabilities();
$('#show-more-row').show();
$('#show-more').show();
const addProbability = (res) => {
const comment = res.Likelihood > 80 ? 'Almost certain' : res.Likelihood > 45 ? 'Somewhat certain' : 'Less than coin-toss odds';
const style = res.Likelihood > 80 ? 'almost-certain' : res.Likelihood > 45 ? 'somewhat-certain' : 'not-certain';
const $row = addProbabilityRow();
const $cells = $row.find('td');
$cells.addClass(style);
$cells.eq(0).text(res.Likelihood + '%');
$cells.eq(1).text(res.Effort.toString());
$cells.eq(2).text(res.Duration.toString());
$cells.eq(3).text(res.TotalTasks.toString());
if (simulationData.startDate) {
$cells.eq(4).text(moment(simulationData.startDate).add(res.Duration, 'weeks').format("MMM Do YYYY"));
}
$cells.eq(5).text(comment);
}
result.resultsTable.slice(0, 9).forEach(addProbability);
$('#show-more').off('click').on('click', () => {
result.resultsTable.slice(9).forEach(addProbability);
$('#show-more').off('click').hide();
$('#show-more-row').hide();
});
drawHistogram('res-duration-histogram', result.simulations.map(s => s.durationInCalendarWeeks), confidenceLevel);
drawBurnDowns('res-burn-downs', result.burnDowns);
drawScatterPlot('res-effort-scatter-plot', result.simulations.map(s => s.effortWeeks), confidenceLevel);
write(`Project forecast summary (with ${confidenceLevel}% of confidence):\n`);
write(` - Up to ${effort} person-weeks of effort\n`);
write(` - Can be delivered in up to ${duration} calendar weeks\n`);
if (simulationData.startDate) {
write(` - Can be delivered by ${endDate}\n`);
}
write(`\n\n`);
write(`-----------------------------------------------------\n`);
write(` DETAILS\n`);
write(`-----------------------------------------------------\n`);
write(`Elapsed time: ${elapsed} ms (${Math.round(simulationData.numberOfSimulations / elapsed * 1000)} simulations per second)\n`);
write('All probabilities:\n')
write(` Likelihood\tDuration\tTasks\tEffort \tComment\n`);
for (const res of result.resultsTable) {
const comment = res.Likelihood > 80 ? 'Almost certain' : res.Likelihood > 45 ? 'Somewhat certain' : 'Less than coin-toss odds';
write(` ${res.Likelihood}% \t${res.Duration} weeks \t${res.TotalTasks}\t${res.Effort} person-weeks \t(${comment})\n`);
}
write(`\n`);
write(`Error rates:\n - Weekly throughput: ${result.tpErrorRate}%\n - Task lead-times: ${result.ltErrorRate}%\n`);
write(` (Aim to keep these below 25% by adding more sample data. (< 10% Great, < 25% Good)\n`);
write(` This is the measure of how two random groups of your sample data would align when forecasting.\n`);
write(` Anything below 25% is good, but lower is better. It grows if there is too little data\n`);
write(` and ALSO if the process changes over time and you use too much data.)\n`);
}, 100);
}
function loadDataFromUrl() {
try {
currentlyLoadedHash = location.hash;
const simulationData = JSON.parse(atob(location.hash.trim().substring(1)));
for (const name of Object.getOwnPropertyNames(simulationData)) {
const $el = $('#' + name);
if ($el.is('input,textarea')) {
$el.val(Array.isArray(simulationData[name]) ? simulationData[name].join(',') : simulationData[name]);
}
}
$('#risks').find('.risk-row').remove();
if (simulationData.risks && simulationData.risks.length > 0) {
for (const risk of simulationData.risks) {
fillRisk(risk, addRisk());
}
}
return true;
} catch (error) {
console.error(error);
return false;
}
}
if (location.hash && location.hash.trim().length > 1) {
if (loadDataFromUrl()) {
runSimulation();
}
}
window.onhashchange = function () {
if (currentlyLoadedHash != location.hash) {
location.reload();
}
}
$('#addRisk').on('click', addRisk);
$('#share').on('click', share);
$('#run').on('click', runSimulation);
});
<|start_filename|>monte_carlo.js<|end_filename|>
/**
* Returns the value at a given percentile in a sorted numeric array.
* "Linear interpolation between closest ranks" method
* @param {Array} arr sorted numeric array
* @param {Number} p percentile number between 0 (p0) and 1 (p100)
* @param {Boolean} sort if true, the array will be sorted for you
* @returns the value at a given percentile
*/
function percentile(arr, p, sort = false) {
if (arr.length === 0) return 0;
if (typeof p !== 'number') throw new TypeError('p must be a number');
if (sort) sortNumbers(arr);
if (p <= 0) return arr[0];
if (p >= 1) return arr[arr.length - 1];
const index = (arr.length - 1) * p,
lower = Math.floor(index),
upper = lower + 1,
weight = index % 1;
if (upper >= arr.length) return arr[lower];
return arr[lower] * (1 - weight) + arr[upper] * weight;
}
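// Worked example: percentile([1, 2, 3, 4], 0.5) computes index = 1.5, so the
// result interpolates halfway between arr[1] and arr[2]: 2 * 0.5 + 3 * 0.5 = 2.5.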
/**
* Sorts a numeric array
* @param {Array} array numeric array
*/
function sortNumbers(array) {
return array.sort((a, b) => a - b);
}
/**
* Generates a random integer between "min" and "max"
* @param {Number} min minimum number (inclusive)
* @param {Number} max maximum number (inclusive)
* @returns random integer
*/
function randomInteger(min, max) {
return Math.floor(Math.random() * (max + 1 - min)) + min;
}
/**
* Retrieves a random element from an array
* @param {Array} array array of item
*/
function randomElement(array) {
return array[randomInteger(0, array.length - 1)];
}
/**
* Generates an average of random sample elements from a numeric array
* @param {Array} array numeric array
* @param {Number} minNumberOfItems minimum number of random samples to average
* @param {Number} maxNumberOfItems maximum number of random samples to average
* @returns the average for the random samples selected
*/
function randomSampleAverage(array, minNumberOfItems, maxNumberOfItems) {
if (array.length == 0) return 0;
const numberOfItems = randomInteger(minNumberOfItems, maxNumberOfItems);
let total = 0;
for (let i = 0; i < numberOfItems; i++) {
total += randomElement(array);
}
return (total / numberOfItems);
}
/**
* Calculates the estimated error rate/range for the given numeric array
* @param {Array} array numeric array
* @returns estimated error rate/range between 0 and 100
*/
function errorRate(array) {
if (array.length <= 1) return 0;
const sortedArray = sortNumbers([...array]);
const min = sortedArray[0];
const max = sortedArray[sortedArray.length - 1];
if (max === min) return 0; // all samples identical: avoid a division by zero below
const group1 = [...sortedArray].filter((_val, index) => index % 2 != 0)
const g1avg = group1.reduce((a, b) => a + b, 0) / group1.length;
const group2 = [...sortedArray].filter((_val, index) => index % 2 == 0)
const g2avg = group2.reduce((a, b) => a + b, 0) / group2.length;
const avgError = Math.abs(g1avg - g2avg)
return Math.round(100 * avgError / (max - min));
}
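// Worked example: errorRate([10, 12, 14, 16]) splits the sorted data into
// [12, 16] (avg 14) and [10, 14] (avg 12); the difference of 2 over the
// range of 6 gives Math.round(100 * 2 / 6) = 33.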
/**
* Calculates the "S-curve" distribution of individual contributors for the given simulation data
* @param {*} simulationData simulation data
* @returns numeric array with exactly 100 elements, and each position in the array represents the number of individual contributors for that percentage of completion in the project
*/
function calculateContributorsDistribution(simulationData) {
const { minContributors, maxContributors, sCurveSize } = simulationData;
const curveSize = Math.max(0, Math.min(50, sCurveSize));
const curveTailStart = 100 - curveSize;
const contributorsRange = [];
for (let i = minContributors; i < maxContributors; i++) {
contributorsRange.push(i);
}
const contributorsDistribution = [];
const get = p => Math.min(maxContributors, Math.max(minContributors, Math.round(percentile(contributorsRange, p))));
for (let i = 0; i < 100; i++) {
if (i < curveSize) contributorsDistribution.push(get(i / curveSize));
else if (i < curveTailStart) contributorsDistribution.push(maxContributors);
else contributorsDistribution.push(get((100 - i) / curveSize));
}
return contributorsDistribution;
}
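// Illustrative shape (assumed inputs): with minContributors = 2, maxContributors = 6
// and sCurveSize = 20, the 100 entries ramp up from 2 over the first 20 slots,
// hold at 6 through the middle 60, and ramp back down over the last 20 --
// a trapezoidal staffing profile approximating an S-curve.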
/**
* Executes a single round of Monte Carlo burn down simulation for the given simulation data
* @param {*} simulationData simulation data
* @returns simulation result for this round
*/
function simulateBurnDown(simulationData) {
// Caches the "S-curve" distribution in the first run
if (!simulationData.contributorsDistribution) {
simulationData.contributorsDistribution = calculateContributorsDistribution(simulationData);
}
const { tpSamples, ltSamples, splitRateSamples, risks, numberOfTasks, totalContributors, maxContributors, contributorsDistribution } = simulationData;
// Retrieve a random split rate for this round
const randomSplitRate = randomSampleAverage(splitRateSamples, 1, splitRateSamples.length * 3) || 1.0;
// Calculate random impacts for this round
let impactTasks = 0;
for (const risk of risks) {
if (Math.random() <= risk.likelihood) {
impactTasks += randomInteger(risk.lowImpact, risk.highImpact);
}
}
// Calculate the number of tasks for this round
const totalTasks = Math.round((numberOfTasks + impactTasks) * randomSplitRate);
// Extend the duration by a random sample average of lead times
const leadTime = randomSampleAverage(ltSamples, Math.round(ltSamples.length * 0.1), Math.round(ltSamples.length * 0.9)) || 0;
let durationInCalendarWeeks = Math.round(leadTime / 7);
let weekNumber = 0
let effortWeeks = 0;
const burnDown = [];
let remainingTasks = totalTasks;
// Run the simulation
while (remainingTasks > 0) {
burnDown.push(Math.ceil(remainingTasks));
const randomTp = randomElement(tpSamples);
const percentComplete = Math.max(0, Math.min(99, Math.round((totalTasks - remainingTasks) / totalTasks * 100)));
const contributorsThisWeek = contributorsDistribution[percentComplete];
const adjustedTp = (randomTp * (contributorsThisWeek / totalContributors));
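// e.g. a sampled throughput of 6 tasks/week with 3 of 5 contributors active
// this week yields 6 * (3 / 5) = 3.6 tasks of progress.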
remainingTasks -= adjustedTp;
durationInCalendarWeeks++;
weekNumber++;
effortWeeks += contributorsThisWeek;
}
burnDown.push(0);
return {
totalTasks,
durationInCalendarWeeks,
leadTime,
effortWeeks,
burnDown,
}
}
/**
* Run a full Monte Carlo simulation for the given data
* @param {*} simulationData simulation data
* @returns result of the simulation
*/
function runMonteCarloSimulation(simulationData) {
// Copy the input (including each risk object) so normalizing likelihoods
// below does not mutate the caller's data.
simulationData = {...simulationData, risks: (simulationData.risks || []).map(r => ({...r}))};
for (const risk of simulationData.risks) {
if (risk.likelihood >= 1) risk.likelihood /= 100;
}
const { numberOfSimulations } = simulationData;
const burnDowns = [];
const simulations = [];
for (let i = 0; i < numberOfSimulations; i++) {
const res = simulateBurnDown(simulationData);
simulations.push({
durationInCalendarWeeks: res.durationInCalendarWeeks,
totalTasks: res.totalTasks,
leadTime: res.leadTime,
effortWeeks: res.effortWeeks,
});
if (i < 100) {
burnDowns.push(res.burnDown);
}
}
const durationHistogram = sortNumbers(simulations.map(s => s.durationInCalendarWeeks));
const tasksHistogram = sortNumbers(simulations.map(s => s.totalTasks));
const ltHistogram = sortNumbers(simulations.map(s => s.leadTime));
const effortHistogram = sortNumbers(simulations.map(s => s.effortWeeks));
let resultsTable = [];
let p = 100;
while (p >= 0) {
const duration = percentile(durationHistogram, p / 100);
const tasks = percentile(tasksHistogram, p / 100);
const leadTime = percentile(ltHistogram, p / 100);
const effort = percentile(effortHistogram, p / 100);
resultsTable.push({
Likelihood: p,
Duration: Math.round(duration),
TotalTasks: Math.round(tasks),
Effort: Math.round(effort),
LT: Math.round(leadTime)
});
p -= 5;
}
const tpErrorRate = errorRate(simulationData.tpSamples);
const ltErrorRate = errorRate(simulationData.ltSamples);
return {
simulations,
burnDowns,
tpErrorRate,
ltErrorRate,
resultsTable,
}
}
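// Illustrative call (all values below are made-up sample data, not defaults):
// const result = runMonteCarloSimulation({
//     numberOfSimulations: 10000,
//     tpSamples: [3, 5, 4, 6],                 // weekly throughput samples
//     ltSamples: [4, 9, 7, 12],                // task lead times, in days
//     splitRateSamples: [1.1, 1.2],            // observed scope-growth multipliers
//     risks: [{ likelihood: 25, lowImpact: 2, highImpact: 5 }],
//     numberOfTasks: 60,
//     totalContributors: 5,
//     minContributors: 3,
//     maxContributors: 5,
//     sCurveSize: 20,
// });
// result.resultsTable then holds duration/effort forecasts per likelihood percentile.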
<|start_filename|>draw-charts.js<|end_filename|>
const chartsCache = {};
function drawHistogram(id, durations, linePercentile = 85) {
if (chartsCache[id]) {
chartsCache[id].destroy();
chartsCache[id] = null;
}
const ctx = document.getElementById(id).getContext('2d');
const histogram = {};
for (const val of durations) {
histogram[val] = (histogram[val] || 0) + 1;
}
const keys = sortNumbers(Object.keys(histogram).map(Number));
const labels = keys.map(n => n.toString());
const data = keys.map(key => histogram[key]);
const lineValue = Math.round(percentile(durations, linePercentile/100, true));
let lineIndex = keys.findIndex(key => lineValue < key) - 0.5;
if (lineIndex < -1) lineIndex = keys.length - 0.5; // the percentile falls in the last bucket
chartsCache[id] = new Chart(ctx, {
type: 'bar',
data: {
labels: labels,
datasets: [{
data: data,
borderWidth: 1,
backgroundColor: 'rgba(54, 162, 235, 0.2)',
borderColor: 'rgba(54, 162, 235, 1)',
}]
},
options: {
title: {
display: true,
text: "Duration histogram"
},
legend: {
display: false
},
tooltips: {
mode: 'disabled'
},
scales: {
yAxes: [{
ticks: {
beginAtZero: true
},
scaleLabel: {
display: true,
labelString: 'Frequency of occurrences'
}
}],
xAxes: [
{
scaleLabel: {
display: true,
labelString: 'Calendar weeks'
}
}
]
},
annotation: {
drawTime: 'afterDraw',
annotations: [{
type: 'line',
mode: 'vertical',
value: lineIndex,
scaleID: 'x-axis-0',
borderColor: 'red',
borderWidth: 2,
borderDash: [2, 2],
label: {
enabled: true,
content: `p${linePercentile}`,
position: 'top',
yAdjust: 10,
fontSize: 10,
backgroundColor: 'rgba(0,0,0,0.6)',
}
}]
}
}
});
}
function drawBurnDowns(id, burnDowns) {
if (chartsCache[id]) {
chartsCache[id].destroy();
chartsCache[id] = null;
}
const ctx = document.getElementById(id).getContext('2d');
const max = Math.max(...burnDowns.map(b => b.length));
const labels = []
for (let i = 1; i <= max; i++) {
labels.push(i.toString());
}
const datasets = burnDowns.map(burnDown => ({
label: { mode: 'disabled' },
data: burnDown,
fill: false,
borderWidth: 1,
pointRadius: 0,
pointHoverRadius: 0,
}));
chartsCache[id] = new Chart(ctx, {
type: 'line',
data: {
labels: labels,
datasets: datasets
},
options: {
title: {
display: true,
text: "First 100 burn downs"
},
legend: {
display: false
},
tooltips: {
mode: 'disabled'
},
scales: {
yAxes: [{
ticks: {
beginAtZero: true
},
scaleLabel: {
display: true,
labelString: 'Remaining tasks'
}
}],
xAxes: [
{
scaleLabel: {
display: true,
labelString: 'Week number'
}
}
]
}
}
});
}
function drawScatterPlot(id, values, linePercentile = 85) {
if (chartsCache[id]) {
chartsCache[id].destroy();
chartsCache[id] = null;
}
const data = values.slice(0, 500).map((val, index) => ({x: index, y: val}))
const lineValue = Math.round(percentile(values, linePercentile/100, true));
const ctx = document.getElementById(id).getContext('2d');
chartsCache[id] = new Chart(ctx, {
type: 'scatter',
data: {
//labels: labels,
datasets: [{
data: data,
//borderWidth: 1,
pointBackgroundColor: 'rgba(54, 162, 235, 0.2)',
pointBorderColor: 'rgba(54, 162, 235, 1)',
}]
},
options: {
title: {
display: true,
text: "Effort scatter plot (first 500 runs)"
},
legend: {
display: false
},
tooltips: {
mode: 'disabled'
},
scales: {
yAxes: [{
ticks: {
beginAtZero: true
},
scaleLabel: {
display: true,
labelString: 'Effort in person-weeks'
}
}]
},
annotation: {
drawTime: 'afterDraw',
annotations: [{
type: 'line',
mode: 'horizontal',
value: lineValue,
scaleID: 'y-axis-1',
borderColor: 'red',
borderWidth: 2,
borderDash: [2, 2],
label: {
enabled: true,
content: `p${linePercentile}`,
fontSize: 10,
backgroundColor: 'rgba(0,0,0,0.6)',
}
}]
}
}
});
}
| rodrigozr/ProjectForecaster |
<|start_filename|>src/pages/Confirmation/index.js<|end_filename|>
export { default } from "./Confirmation";
| AlbertGarciaFernandez/react-shopping-cart-context |
<|start_filename|>client/report/MonthlyScatter.js<|end_filename|>
import React, {useState, useEffect} from 'react'
import { makeStyles } from '@material-ui/core/styles'
import Typography from '@material-ui/core/Typography'
import auth from '../auth/auth-helper'
import DateFnsUtils from '@date-io/date-fns'
import { DatePicker, MuiPickersUtilsProvider} from "@material-ui/pickers"
import {plotExpenses} from './../expense/api-expense.js'
import { VictoryTheme, VictoryScatter, VictoryChart, VictoryTooltip, VictoryLabel} from "victory";
const useStyles = makeStyles(theme => ({
title: {
padding:`32px ${theme.spacing(2.5)}px 2px`,
color: '#2bbd7e',
display:'inline'
}
}))
export default function MonthlyScatter() {
const classes = useStyles()
const [error, setError] = useState('')
const [plot, setPlot] = useState([])
const [month, setMonth] = useState(new Date())
const jwt = auth.isAuthenticated()
useEffect(() => {
const abortController = new AbortController()
const signal = abortController.signal
plotExpenses({month: month},{t: jwt.token}, signal).then((data) => {
if (data.error) {
setError(data.error)
} else {
setPlot(data)
}
})
return function cleanup(){
abortController.abort()
}
}, [])
const handleDateChange = date => {
setMonth(date)
plotExpenses({month: date},{t: jwt.token}).then((data) => {
if (data.error) {
setError(data.error)
} else {
setPlot(data)
}
})
}
return (
<div style={{marginBottom: 20}}>
<Typography variant="h6" className={classes.title}>Expenses scattered over </Typography>
<MuiPickersUtilsProvider utils={DateFnsUtils}>
<DatePicker value={month} onChange={handleDateChange} views={["year", "month"]}
disableFuture
label="Month"
animateYearScrolling
variant="inline"/>
</MuiPickersUtilsProvider>
<VictoryChart
theme={VictoryTheme.material}
height={400}
width={550}
domainPadding={40}
>
<VictoryScatter
style={{
data: { fill: "#01579b", stroke: "#69f0ae", strokeWidth: 2 },
labels: { fill: "#01579b", fontSize: 10, padding:8}
}}
bubbleProperty="y"
maxBubbleSize={15}
minBubbleSize={5}
labels={({ datum }) => `$${datum.y} on ${datum.x}th`}
labelComponent={<VictoryTooltip/>}
data={plot}
domain={{x: [0, 31]}}
/>
<VictoryLabel
textAnchor="middle"
style={{ fontSize: 14, fill: '#8b8b8b' }}
x={270} y={390}
text={`day of month`}
/>
<VictoryLabel
textAnchor="middle"
style={{ fontSize: 14, fill: '#8b8b8b' }}
x={6} y={190}
angle = {270}
text={`Amount ($)`}
/>
</VictoryChart>
</div>
)
} | vicfull1228/mern-expense-tracker |
<|start_filename|>src/index.js<|end_filename|>
"use strict";
/**
* a11yCore module - used as a namespace.
* @module a11yCore
*/
import * as color from "a11y-color";
export { color };
<|start_filename|>docs/index.js.html<|end_filename|>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>JSDoc: Source: index.js</title>
<script src="scripts/prettify/prettify.js"> </script>
<script src="scripts/prettify/lang-css.js"> </script>
<!--[if lt IE 9]>
<script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<link type="text/css" rel="stylesheet" href="styles/prettify-tomorrow.css">
<link type="text/css" rel="stylesheet" href="styles/jsdoc-default.css">
</head>
<body>
<div id="main">
<h1 class="page-title">Source: index.js</h1>
<section>
<article>
<pre class="prettyprint source linenums"><code>"use strict";
/**
* a11yCore module - used as a namespace.
* @module a11yCore
*/
Object.defineProperty(exports, "__esModule", {
value: true
});
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj["default"] = obj; return newObj; } }
var _a11yColor = require("a11y-color");
var color = _interopRequireWildcard(_a11yColor);
exports.color = color;</code></pre>
</article>
</section>
</div>
<nav>
<h2><a href="index.html">Index</a></h2><h3>Modules</h3><ul><li><a href="module-a11yCore.html">a11yCore</a></li></ul>
</nav>
<br clear="both">
<footer>
Documentation generated by <a href="https://github.com/jsdoc3/jsdoc">JSDoc 3.3.0-alpha5</a> on Sun Nov 13 2016 19:28:54 GMT-0800 (PST)
</footer>
<script> prettyPrint(); </script>
<script src="scripts/linenumber.js"> </script>
</body>
</html>
<|start_filename|>test/color.js<|end_filename|>
"use strict";
import {expect} from "chai";
import * as a11yCore from "../src/index";
describe("a11yCore", () => {
describe("color", () => {
it("has the expected API", () => {
expect(a11yCore.color.Color).to.exist;
expect(a11yCore.color.calculateContrastRatio).to.exist;
expect(a11yCore.color.flattenColors).to.exist;
expect(a11yCore.color.parseColor).to.exist;
expect(a11yCore.color.suggestColors).to.exist;
});
});
});
| A11yance/a11y-core |
<|start_filename|>src/QtsSolutions.Web.Core/Models/TokenAuth/ExternalLoginProviderInfoModel.cs<|end_filename|>
using Abp.AutoMapper;
using QtsSolutions.Authentication.External;
namespace QtsSolutions.Models.TokenAuth
{
[AutoMapFrom(typeof(ExternalLoginProviderInfo))]
public class ExternalLoginProviderInfoModel
{
public string Name { get; set; }
public string ClientId { get; set; }
}
}
<|start_filename|>src/QtsSolutions.EntityFrameworkCore/EntityFrameworkCore/QtsSolutionsDbContextConfigurer.cs<|end_filename|>
using System.Data.Common;
using Microsoft.EntityFrameworkCore;
namespace QtsSolutions.EntityFrameworkCore
{
public static class QtsSolutionsDbContextConfigurer
{
public static void Configure(DbContextOptionsBuilder<QtsSolutionsDbContext> builder, string connectionString)
{
builder.UseSqlServer(connectionString);
}
public static void Configure(DbContextOptionsBuilder<QtsSolutionsDbContext> builder, DbConnection connection)
{
builder.UseSqlServer(connection);
}
}
}
<|start_filename|>src/QtsSolutions.Web.Host/Startup/QtsSolutionsWebHostModule.cs<|end_filename|>
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Abp.Modules;
using Abp.Reflection.Extensions;
using QtsSolutions.Configuration;
namespace QtsSolutions.Web.Host.Startup
{
[DependsOn(
typeof(QtsSolutionsWebCoreModule))]
public class QtsSolutionsWebHostModule: AbpModule
{
private readonly IWebHostEnvironment _env;
private readonly IConfigurationRoot _appConfiguration;
public QtsSolutionsWebHostModule(IWebHostEnvironment env)
{
_env = env;
_appConfiguration = env.GetAppConfiguration();
}
public override void Initialize()
{
IocManager.RegisterAssemblyByConvention(typeof(QtsSolutionsWebHostModule).GetAssembly());
}
}
}
<|start_filename|>src/QtsSolutions.Core/Identity/SecurityStampValidator.cs<|end_filename|>
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Options;
using Abp.Authorization;
using QtsSolutions.Authorization.Roles;
using QtsSolutions.Authorization.Users;
using QtsSolutions.MultiTenancy;
using Microsoft.Extensions.Logging;
namespace QtsSolutions.Identity
{
public class SecurityStampValidator : AbpSecurityStampValidator<Tenant, Role, User>
{
public SecurityStampValidator(
IOptions<SecurityStampValidatorOptions> options,
SignInManager signInManager,
ISystemClock systemClock,
ILoggerFactory loggerFactory)
: base(options, signInManager, systemClock, loggerFactory)
{
}
}
}
<|start_filename|>src/QtsSolutions.Application/MultiTenancy/TenantAppService.cs<|end_filename|>
using System.Linq;
using System.Threading.Tasks;
using Abp.Application.Services;
using Abp.Application.Services.Dto;
using Abp.Authorization;
using Abp.Domain.Repositories;
using Abp.Extensions;
using Abp.IdentityFramework;
using Abp.Linq.Extensions;
using Abp.MultiTenancy;
using Abp.Runtime.Security;
using QtsSolutions.Authorization;
using QtsSolutions.Authorization.Roles;
using QtsSolutions.Authorization.Users;
using QtsSolutions.Editions;
using QtsSolutions.MultiTenancy.Dto;
using Microsoft.AspNetCore.Identity;
namespace QtsSolutions.MultiTenancy
{
[AbpAuthorize(PermissionNames.Pages_Tenants)]
public class TenantAppService : AsyncCrudAppService<Tenant, TenantDto, int, PagedTenantResultRequestDto, CreateTenantDto, TenantDto>, ITenantAppService
{
private readonly TenantManager _tenantManager;
private readonly EditionManager _editionManager;
private readonly UserManager _userManager;
private readonly RoleManager _roleManager;
private readonly IAbpZeroDbMigrator _abpZeroDbMigrator;
public TenantAppService(
IRepository<Tenant, int> repository,
TenantManager tenantManager,
EditionManager editionManager,
UserManager userManager,
RoleManager roleManager,
IAbpZeroDbMigrator abpZeroDbMigrator)
: base(repository)
{
_tenantManager = tenantManager;
_editionManager = editionManager;
_userManager = userManager;
_roleManager = roleManager;
_abpZeroDbMigrator = abpZeroDbMigrator;
}
public override async Task<TenantDto> CreateAsync(CreateTenantDto input)
{
CheckCreatePermission();
// Create tenant
var tenant = ObjectMapper.Map<Tenant>(input);
tenant.ConnectionString = input.ConnectionString.IsNullOrEmpty()
? null
: SimpleStringCipher.Instance.Encrypt(input.ConnectionString);
var defaultEdition = await _editionManager.FindByNameAsync(EditionManager.DefaultEditionName);
if (defaultEdition != null)
{
tenant.EditionId = defaultEdition.Id;
}
await _tenantManager.CreateAsync(tenant);
await CurrentUnitOfWork.SaveChangesAsync(); // To get new tenant's id.
// Create tenant database
_abpZeroDbMigrator.CreateOrMigrateForTenant(tenant);
// We are working with the entities of the new tenant, so switch the tenant filter
using (CurrentUnitOfWork.SetTenantId(tenant.Id))
{
// Create static roles for new tenant
CheckErrors(await _roleManager.CreateStaticRoles(tenant.Id));
await CurrentUnitOfWork.SaveChangesAsync(); // To get static role ids
// Grant all permissions to admin role
var adminRole = _roleManager.Roles.Single(r => r.Name == StaticRoleNames.Tenants.Admin);
await _roleManager.GrantAllPermissionsAsync(adminRole);
// Create admin user for the tenant
var adminUser = User.CreateTenantAdminUser(tenant.Id, input.AdminEmailAddress);
await _userManager.InitializeOptionsAsync(tenant.Id);
CheckErrors(await _userManager.CreateAsync(adminUser, User.DefaultPassword));
await CurrentUnitOfWork.SaveChangesAsync(); // To get admin user's id
// Assign admin user to role!
CheckErrors(await _userManager.AddToRoleAsync(adminUser, adminRole.Name));
await CurrentUnitOfWork.SaveChangesAsync();
}
return MapToEntityDto(tenant);
}
protected override IQueryable<Tenant> CreateFilteredQuery(PagedTenantResultRequestDto input)
{
return Repository.GetAll()
.WhereIf(!input.Keyword.IsNullOrWhiteSpace(), x => x.TenancyName.Contains(input.Keyword) || x.Name.Contains(input.Keyword))
.WhereIf(input.IsActive.HasValue, x => x.IsActive == input.IsActive);
}
protected override void MapToEntity(TenantDto updateInput, Tenant entity)
{
// Manually mapped since TenantDto contains non-editable properties too.
entity.Name = updateInput.Name;
entity.TenancyName = updateInput.TenancyName;
entity.IsActive = updateInput.IsActive;
}
public override async Task DeleteAsync(EntityDto<int> input)
{
CheckDeletePermission();
var tenant = await _tenantManager.GetByIdAsync(input.Id);
await _tenantManager.DeleteAsync(tenant);
}
private void CheckErrors(IdentityResult identityResult)
{
identityResult.CheckErrors(LocalizationManager);
}
}
}
<|start_filename|>src/QtsSolutions.Web.Host/appsettings.Staging.json<|end_filename|>
{
"ConnectionStrings": {
"Default": "Server=DESKTOP-02RK9HN\\QUADRATESQL; Database=QtsSolutionsDb; Integrated Security=True;MultipleActiveResultSets=True"
},
"App": {
"ServerRootAddress": "http://localhost:9902/",
"CorsOrigins": "http://localhost:9902"
}
}
<|start_filename|>src/QtsSolutions.Web.Core/Controllers/QtsSolutionsControllerBase.cs<|end_filename|>
using Abp.AspNetCore.Mvc.Controllers;
using Abp.IdentityFramework;
using Microsoft.AspNetCore.Identity;
namespace QtsSolutions.Controllers
{
public abstract class QtsSolutionsControllerBase: AbpController
{
protected QtsSolutionsControllerBase()
{
LocalizationSourceName = QtsSolutionsConsts.LocalizationSourceName;
}
protected void CheckErrors(IdentityResult identityResult)
{
identityResult.CheckErrors(LocalizationManager);
}
}
}
<|start_filename|>src/QtsSolutions.Application/MultiTenancy/ITenantAppService.cs<|end_filename|>
using Abp.Application.Services;
using QtsSolutions.MultiTenancy.Dto;
namespace QtsSolutions.MultiTenancy
{
public interface ITenantAppService : IAsyncCrudAppService<TenantDto, int, PagedTenantResultRequestDto, CreateTenantDto, TenantDto>
{
}
}
<|start_filename|>src/QtsSolutions.Application/Sessions/Dto/TenantLoginInfoDto.cs<|end_filename|>
using Abp.Application.Services.Dto;
using Abp.AutoMapper;
using QtsSolutions.MultiTenancy;
namespace QtsSolutions.Sessions.Dto
{
[AutoMapFrom(typeof(Tenant))]
public class TenantLoginInfoDto : EntityDto
{
public string TenancyName { get; set; }
public string Name { get; set; }
}
}
<|start_filename|>src/QtsSolutions.Core/Identity/IdentityRegistrar.cs<|end_filename|>
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.DependencyInjection;
using QtsSolutions.Authorization;
using QtsSolutions.Authorization.Roles;
using QtsSolutions.Authorization.Users;
using QtsSolutions.Editions;
using QtsSolutions.MultiTenancy;
namespace QtsSolutions.Identity
{
public static class IdentityRegistrar
{
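/// <summary>
/// Registers ASP.NET Core Identity with the ABP Zero tenant, user and role managers, stores, and sign-in helpers.
/// </summary>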
public static IdentityBuilder Register(IServiceCollection services)
{
services.AddLogging();
return services.AddAbpIdentity<Tenant, User, Role>()
.AddAbpTenantManager<TenantManager>()
.AddAbpUserManager<UserManager>()
.AddAbpRoleManager<RoleManager>()
.AddAbpEditionManager<EditionManager>()
.AddAbpUserStore<UserStore>()
.AddAbpRoleStore<RoleStore>()
.AddAbpLogInManager<LogInManager>()
.AddAbpSignInManager<SignInManager>()
.AddAbpSecurityStampValidator<SecurityStampValidator>()
.AddAbpUserClaimsPrincipalFactory<UserClaimsPrincipalFactory>()
.AddPermissionChecker<PermissionChecker>()
.AddDefaultTokenProviders();
}
}
}
<|start_filename|>src/QtsSolutions.Core/Localization/QtsSolutionsLocalizationConfigurer.cs<|end_filename|>
using Abp.Configuration.Startup;
using Abp.Localization.Dictionaries;
using Abp.Localization.Dictionaries.Xml;
using Abp.Reflection.Extensions;
namespace QtsSolutions.Localization
{
public static class QtsSolutionsLocalizationConfigurer
{
public static void Configure(ILocalizationConfiguration localizationConfiguration)
{
localizationConfiguration.Sources.Add(
new DictionaryBasedLocalizationSource(QtsSolutionsConsts.LocalizationSourceName,
new XmlEmbeddedFileLocalizationDictionaryProvider(
typeof(QtsSolutionsLocalizationConfigurer).GetAssembly(),
"QtsSolutions.Localization.SourceFiles"
)
)
);
}
}
}
<|start_filename|>src/QtsSolutions.Application/Configuration/IConfigurationAppService.cs<|end_filename|>
using System.Threading.Tasks;
using QtsSolutions.Configuration.Dto;
namespace QtsSolutions.Configuration
{
public interface IConfigurationAppService
{
Task ChangeUiTheme(ChangeUiThemeInput input);
}
}
<|start_filename|>src/QtsSolutions.EntityFrameworkCore/EntityFrameworkCore/QtsSolutionsDbContext.cs<|end_filename|>
using Microsoft.EntityFrameworkCore;
using Abp.Zero.EntityFrameworkCore;
using QtsSolutions.Authorization.Roles;
using QtsSolutions.Authorization.Users;
using QtsSolutions.MultiTenancy;
namespace QtsSolutions.EntityFrameworkCore
{
public class QtsSolutionsDbContext : AbpZeroDbContext<Tenant, Role, User, QtsSolutionsDbContext>
{
/* Define a DbSet for each entity of the application */
public QtsSolutionsDbContext(DbContextOptions<QtsSolutionsDbContext> options)
: base(options)
{
}
}
}
| nirzaf/QtsSolutions |
<|start_filename|>www/public/app/js/modules/hawk.js<|end_filename|>
module.exports = require('@hawk.so/javascript');
| neSpecc/codex.edu |
<|start_filename|>go/core/ais.go<|end_filename|>
package core
const (
// domain name for asr service
ASR_ENDPOINT string = "sis.cn-north-1.myhuaweicloud.com"
// domain name for tts service
TTS_ENDPOINT string = "sis.cn-north-1.myhuaweicloud.com"
// domain name for obtaining an IAM token
IAM_ENPOINT string = "iam.cn-north-1.myhuaweicloud.com"
// the uri for get token
IAM_TOKEN string = "/v3/auth/tokens"
// instrument recognition uri
INSTRUMENT = "/v1.0/image/classify/instrument"
// the uri for the image tagging service
IMAGE_TAGGING string = "/v1.0/image/tagging"
// the uri for the background music (bgm) recognition service
ASR_BGM string = "/v1.0/bgm/recognition"
// the uri for the sentence-level asr service
ASR_SCENTENCE string = "/v1.0/voice/asr/sentence"
// the uri for the celebrity recognition service
CELEBRITY_RECOGNITION string = "/v1.0/image/celebrity-recognition"
// the uri for the image clarity detection service
IMAGE_CLARITY_DETECT string = "/v1.0/moderation/image/clarity-detect"
// the uri for the dark image enhancement service
DARK_ENHANCE string = "/v1.0/vision/dark-enhance"
// the uri for the distortion correction service
DISTORTION_CORRECTION string = "/v1.0/moderation/image/distortion-correct"
// the uri for the image anti-porn service
IMAGE_ANTI_PORN string = "/v1.0/moderation/image/anti-porn"
// the uri for the image content moderation service
IMAGE_MODERATION string = "/v1.0/moderation/image"
// the uri for the image defogging service
IMAGE_DEFOG string = "/v1.0/vision/defog"
// the uri for the text moderation service
MODERATION_TEXT string = "/v1.0/moderation/text"
// the uri for the recapture detection service
RECAPTURE_DETECT string = "/v1.0/image/recapture-detect"
// the uri for the super resolution service
SUPER_RESOLUTION string = "/v1.0/vision/super-resolution"
// the uri for tts
TTS string = "/v1.0/voice/tts"
// the uri for the video moderation service
MODERATION_VIDEO string = "/v1.0/moderation/video"
// the uri for the long-form speech recognition service
LONG_SENTENCE string = "/v1.0/voice/asr/long-sentence"
// the uri for querying the result of an image content batch job
IMAGE_MODERATION_BATCH_RESULT string = "/v1.0/moderation/image/batch"
// the uri for submitting an image content batch job
IMAGE_MODERATION_BATCH_JOBS string = "/v1.0/moderation/image/batch/jobs"
// the uri for the image content batch service
IMAGE_MODERATION_BATCH string = "/v1.0/moderation/image/batch"
// image service type
IMAGE string = "image"
// moderation service type
MODERATION string = "moderation"
// the max retry times
RETRY_MAX_TIMES int = 3
)
<|start_filename|>javascript/moderation_video_token_demo.js<|end_filename|>
/**
* Sample request to the video moderation service using token authentication
*/
var video = require("./ais_sdk/moderation_video");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
// The OBS URL must be in the same region as the service; OBS resources are not shared across regions
var demo_data_url = "https://obs-test-llg.obs.cn-north-1.myhuaweicloud.com/bgm_recognition";
token.getToken(username, domain_name, password, function (token) {
video.video(token, demo_data_url, 5, ["terrorism", "porn", "politics"], function (result) {
console.log(result);
})
});
<|start_filename|>java/ais-moderation-java-sdk/src/main/java/com/huawei/ais/demo/moderation/ModerationImageContentDemo.java<|end_filename|>
package com.huawei.ais.demo.moderation;
import com.alibaba.fastjson.JSONObject;
import com.huawei.ais.demo.ResponseProcessUtils;
import com.huawei.ais.demo.ServiceAccessBuilder;
import com.huawei.ais.sdk.AisAccess;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.FileUtils;
import org.apache.http.HttpResponse;
import org.apache.http.entity.StringEntity;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
/**
* Sample class for the image content moderation service
*/
public class ModerationImageContentDemo {
private AisAccess service;
public ModerationImageContentDemo() {
// 1. Configure the basic information for accessing the image content moderation service and create a client connection object
service = ServiceAccessBuilder.builder()
.ak("######") // your ak
.sk("######") // your sk
.region("cn-north-1") // 内容审核服务目前支持华北-北京一(cn-north-1)以及亚太-香港(ap-southeast-1)
.connectionTimeout(5000) // 连接目标url超时限制
.connectionRequestTimeout(1000) // 连接池获取可用连接超时限制
.socketTimeout(20000) // 获取服务器响应数据超时限制
.build();
}
private void imageContentCheck(byte[] imagebytes) throws IOException {
try {
//
// 2. Build the parameters required to call the image content moderation service
//
String uri = "/v1.0/moderation/image";
String fileBase64Str = Base64.encodeBase64String(imagebytes);
JSONObject json = new JSONObject();
json.put("image", fileBase64Str);
json.put("categories", new String[] {"politics"}); //检测内容
json.put("threshold", 0);
StringEntity stringEntity = new StringEntity(json.toJSONString(), "utf-8");
// 3. Pass in the uri of the image content moderation service together with the required parameters,
// which are provided as a JSON object, and call the service with the POST method
HttpResponse response = service.post(uri, stringEntity);
// 4. Check whether the returned status code indicates success: 200 means success, anything else means failure.
ResponseProcessUtils.processResponseStatus(response);
// 5. Process the character stream returned by the service and output the recognition result.
ResponseProcessUtils.processResponse(response);
} catch (Exception e) {
e.printStackTrace();
} finally {
// 6. When finished, close the client connection to the service
service.close();
}
}
private void imageAntiporn(byte[] imagebytes) throws IOException {
try {
//
// 2. Build the parameters required to call the image content moderation service
//
String uri = "/v1.1/moderation/image/anti-porn";
String fileBase64Str = Base64.encodeBase64String(imagebytes);
JSONObject json = new JSONObject();
json.put("image", fileBase64Str);
StringEntity stringEntity = new StringEntity(json.toJSONString(), "utf-8");
// 3. Pass in the uri of the image content moderation service together with the required parameters,
// which are provided as a JSON object, and call the service with the POST method
HttpResponse response = service.post(uri, stringEntity);
// 4. Check whether the returned status code indicates success: 200 means success, anything else means failure.
ResponseProcessUtils.processResponseStatus(response);
// 5. Process the character stream returned by the service and output the recognition result.
ResponseProcessUtils.processResponse(response);
} catch (Exception e) {
e.printStackTrace();
} finally {
// 6. When finished, close the client connection to the service
service.close();
}
}
public byte[] downloadUrl(String url) throws MalformedURLException, IOException {
InputStream in = new URL(url).openStream();
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] buffer = new byte[128];
int n = in.read(buffer);
while (n != -1) {
out.write(buffer, 0, n);
n = in.read(buffer);
}
in.close();
out.close();
return out.toByteArray();
}
//
// Main entry point
//
public static void main(String[] args) throws IOException {
ModerationImageContentDemo tool = new ModerationImageContentDemo();
byte[] imageBytes = tool.downloadUrl("https://obs-ch-sdk-sample.obs.cn-north-1.myhuaweicloud.com/terrorism.jpg");
tool.imageContentCheck(imageBytes);
tool.imageAntiporn(imageBytes);
imageBytes = FileUtils.readFileToByteArray(new File("data/moderation-demo-1.jpg"));
tool.imageContentCheck(imageBytes);
tool.imageAntiporn(imageBytes);
}
}
<|start_filename|>javascript/asr_bgm_aksk_demo.js<|end_filename|>
/**
* Sample request to the background music (bgm) recognition service using AK/SK authentication
*/
var bgm = require("./ais_sdk/asr_bgm");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var app_key = "**************";
var app_secret = "************";
// The OBS URL must be in the same region as the service; OBS resources are not shared across regions
var obsUrl = "https://obs-test-llg.obs.cn-north-1.myhuaweicloud.com/bgm_recognition";
bgm.asr_bgm_aksk(app_key, app_secret, obsUrl, function (result) {
console.log(result);
});
<|start_filename|>javascript/ais_sdk/image_antiporn.js<|end_filename|>
var https = require("https");
var utils = require("./utils");
var signer = require("./signer");
var ais = require("./ais");
module.exports = {
image_antiporn: function (token, data, url, callback) {
var endPoint = utils.getEndPoint(ais.MODERATION_SERVICE);
// Build the request and its parameters
var requestData = {"image": data, "url": url};
var requestBody = JSON.stringify(requestData);
var host = endPoint;
var method = "POST";
var uri = ais.IMAGE_ANTI_PORN;
var headers = {"Content-Type": "application/json", "X-Auth-Token": token, "Content-Length": requestBody.length};
var options = utils.getHttpRequestEntityOptions(host, method, uri, headers);
var request = https.request(options, function (response) {
// Check whether the returned status code indicates success: 200 means success, anything else means failure.
if (response.statusCode !== 200) {
console.log('Http status code is: ' + response.statusCode);
}
// Process the result and pass the returned data to the callback
response.on("data", function (chunk) {
callback(chunk.toString())
})
});
request.on("error", function (err) {
console.log(err.message);
});
request.write(requestBody);
request.end();
},
image_antiporn_aksk: function (_ak, _sk, data, url, callback) {
// Configure the ak/sk credentials
var sig = new signer.Signer();
sig.AppKey = _ak; // set the ak
sig.AppSecret = _sk; // set the sk
var endPoint = utils.getEndPoint(ais.MODERATION_SERVICE);
// Build the request and its parameters
var requestData = {"image": data, "url": url};
var _headers = {"Content-Type": "application/json"};
var req = new signer.HttpRequest();
var options = utils.getHttpRequestEntity(sig, req, endPoint, "POST", ais.IMAGE_ANTI_PORN, "", _headers, requestData);
var request = https.request(options, function (response) {
// Check whether the returned status code indicates success: 200 means success, anything else means failure.
if (response.statusCode !== 200) {
console.log('Http status code is: ' + response.statusCode);
}
response.on("data", function (chunk) {
callback(chunk.toString());
})
});
request.on("error", function (err) {
console.log(err.message);
});
request.write(req.body);
request.end();
}
};
<|start_filename|>javascript/moderation_text_token_demo.js<|end_filename|>
/**
* Sample request to the text moderation service using token authentication
*/
var text = require("./ais_sdk/moderation_text");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
token.getToken(username, domain_name, password, function (token) {
text.moderation_text(token, [{
"text": "666666luo聊请+110亚砷酸钾六位qq,fuck666666666666666",
"type": "content"}],
["ad", "abuse", "politics", "porn", "contraband"], function (result) {
console.log(result);
})
});
<|start_filename|>java/ais-moderation-java-sdk/src/main/java/com/huawei/ais/demo/vcm/VCMAkskDemo.java<|end_filename|>
package com.huawei.ais.demo.vcm;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.huawei.ais.demo.ServiceAccessBuilder;
import org.apache.http.HttpResponse;
import com.huawei.ais.demo.HttpJsonDataUtils;
import com.huawei.ais.demo.obs.ObsFileHandle;
import com.huawei.ais.demo.obs.SimpleObsClient;
import com.huawei.ais.demo.vcm.model.Category;
import com.huawei.ais.demo.vcm.model.FrameResult;
import com.huawei.ais.demo.vcm.model.JobMetaInfo;
import com.huawei.ais.demo.vcm.model.JobResult;
import com.huawei.ais.demo.vcm.model.JobStatus;
import com.huawei.ais.demo.vcm.model.Suggestion;
import com.huawei.ais.sdk.AisAccess;
/**
* Sample of calling the video moderation API with AK/SK authentication
*/
public class VCMAkskDemo {
private static final String SUBMIT_JOB_URI = "/v1.0/moderation/video";
private static final String GET_JOB_RESULT_URI_TEMPLATE = "/v1.0/moderation/video?job_id=%s";
private static final String GET_JOB_LIST_URI = "/v1.0/moderation/video/jobs";
private static final String JSON_ROOT = "result";
private static final long QUERY_JOB_RESULT_INTERVAL = 2000L;
private static final Integer RETRY_MAX_TIMES = 3; // maximum number of retries when querying the job result fails
public static void main(String[] args) {
// 1. Configure the basic information for accessing the video moderation service and create a client connection object
AisAccess aisAkskClient = ServiceAccessBuilder.builder()
.ak("######") // your ak
.sk("######") // your sk
.region("cn-north-1") // 内容审核服务目前支持华北-北京一(cn-north-1)以及亚太-香港(ap-southeast-1)
.connectionTimeout(5000) // 连接目标url超时限制
.connectionRequestTimeout(1000) // 连接池获取可用连接超时限制
.socketTimeout(20000) // 获取服务器响应数据超时限制
.build();
SimpleObsClient simpleObsClient = new SimpleObsClient(aisAkskClient);
try {
//
// 2. Build the parameters required to call the video moderation service
//
// OBS bucket name; customize as needed
String bucketName = "vcm-sdk-test";
// If the target bucket already exists, there is no need to create it
simpleObsClient.createBucket(bucketName);
// If the video file is local, upload it to OBS and get back an OBS file handle
ObsFileHandle obsFileHandle = simpleObsClient.uploadFile(bucketName, "data/demo.mp4");
// If the video is already in OBS, locate it directly to obtain its OBS file handle
//ObsFileHandle obsFileHandle = simpleObsClient.locateFile(bucketName, "demo.mp4");
// Generate a temporarily authorized download URL for the OBS file
String sharedDownloadUrl = obsFileHandle.generateSharedDownloadUrl();
//
// 3. Call the video moderation service
//
callVCMService(sharedDownloadUrl, aisAkskClient);
//
// 4. Optional: once the job is finished, delete the file from OBS
//
// obsFileHandle.delete();
} catch (Exception e) {
e.printStackTrace();
} finally {
//
// 5. When finished, close the client connections to the service
//
aisAkskClient.close();
simpleObsClient.close();
}
}
private static void callVCMService(String videoUrl, AisAccess service) throws IOException, InterruptedException {
JobMetaInfo jobMetaInfo = new JobMetaInfo();
// set required parameters
jobMetaInfo.setUrl(videoUrl);
// set optional parameters
jobMetaInfo.setFrameInterval(5);
jobMetaInfo.addCategory(Category.POLITICS);
jobMetaInfo.addCategory(Category.TERRORISM);
jobMetaInfo.addCategory(Category.PORN);
HttpResponse response = service.post(SUBMIT_JOB_URI, HttpJsonDataUtils.ObjectToHttpEntity(jobMetaInfo));
if (!HttpJsonDataUtils.isOKResponded(response)) {
System.out.println("Submit the job failed!");
System.out.println("Request body:" + HttpJsonDataUtils.ObjectToJsonString(jobMetaInfo));
System.out.println(HttpJsonDataUtils.responseToString(response));
return;
}
// Get the ID of the successfully submitted job and prepare to query its result
JobResult submitResult = HttpJsonDataUtils.getResponseObject(response, JobResult.class, JSON_ROOT);
String jobId = submitResult.getId();
System.out.println("\nSubmit job successfully, job_id=" + jobId);
// Initialize the count of failed attempts to query the job
Integer retryTimes = 0;
// Build the url used for querying and poll it repeatedly; since this is an asynchronous job,
// polling must continue until the status shows that processing has finished
String url = String.format(GET_JOB_RESULT_URI_TEMPLATE, jobId);
while (true) {
HttpResponse getResponse = service.get(url);
if (!HttpJsonDataUtils.isOKResponded(getResponse)) {
System.out.println("Get " + url);
System.out.println(HttpJsonDataUtils.responseToString(getResponse));
if(retryTimes < RETRY_MAX_TIMES){
retryTimes++;
System.out.println(String.format("Jobs process result failed! The number of retries is %s!", retryTimes));
Thread.sleep(QUERY_JOB_RESULT_INTERVAL);
continue;
}else{
break;
}
}
JobResult jobResult
= HttpJsonDataUtils.getResponseObject(getResponse, JobResult.class, JSON_ROOT);
JobStatus jobStatus = jobResult.getStatus();
// Depending on the job status, either keep polling or print the result
if (jobStatus == JobStatus.CREATED || jobStatus == JobStatus.RUNNING) {
// If the job has not finished yet, wait a while and keep polling
System.out.println("Job " + jobResult.getStatus() + ", waiting...");
Thread.sleep(QUERY_JOB_RESULT_INTERVAL);
} else if (jobStatus == JobStatus.FAILED) {
// If processing failed, exit directly
System.out.println("\nJob failed! cause:" + jobResult.getCause());
break;
} else if (jobStatus == JobStatus.FINISH) {
// The job finished successfully; print the result
System.out.println("\nJob finished!");
processJobFinishedResult(jobResult);
break;
} else {
System.out.println("Should not be here!");
}
}
}
private static void processJobFinishedResult(JobResult jobResult) {
Suggestion generalSuggestion = jobResult.getSuggestion();
System.out.println("\nSuggestion:" + generalSuggestion);
if (generalSuggestion != Suggestion.PASS) {
List<FrameResult> blockFrames = new ArrayList<>();
List<FrameResult> reviewFrames = new ArrayList<>();
jobResult.getFrameResults().forEach(frameResult -> {
switch (frameResult.getFrameSuggestion()) {
case PASS:
break;
case BLOCK:
blockFrames.add(frameResult);
break;
case REVIEW:
reviewFrames.add(frameResult);
break;
}
});
System.out.println("Detail:");
if (!blockFrames.isEmpty()) {
System.out.println("\tFrames blocked:");
blockFrames.forEach(blockFrame -> {
System.out.println(String.format("\t\ttime:%02d~%02ds categories:%s",
blockFrame.getFrameBegin(), blockFrame.getFrameEnd(), blockFrame.getSuspectCategories()));
});
}
if (!reviewFrames.isEmpty()) {
System.out.println("\tFrames need review:");
reviewFrames.forEach(reviewFrame -> {
System.out.println(String.format("\t\ttime:%02d~%02ds categories:%s",
reviewFrame.getFrameBegin(), reviewFrame.getFrameEnd(), reviewFrame.getSuspectCategories()));
});
}
}
}
}
<|start_filename|>javascript/long_sentence_token_demo.js<|end_filename|>
/**
* Sample request to the long-form speech recognition service using token authentication
*/
var lsen = require("./ais_sdk/long_sentence");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
var demo_data_url = "https://obs-ch-sdk-sample.obs.cn-north-1.myhwclouds.com/lsr-1.mp3";
var filepath = "./data/asr-sentence.wav";
var data = utils.changeFileToBase64(filepath);
token.getToken(username, domain_name, password, function (token) {
lsen.long_sentence(token, "", demo_data_url, "", function (result) {
console.log(result)
});
lsen.long_sentence(token, data, "", "", function (result) {
console.log(result)
})
});
<|start_filename|>go/sdk/clarityDetect.go<|end_filename|>
package sdk
import (
"ais_sdk/src/core"
"bytes"
"crypto/tls"
"encoding/json"
"io/ioutil"
"log"
"net/http"
)
// post data by aksk
func ClarityDetectAksk(ak string, sk string, image string, url string, threshold float32) string {
s := core.Signer{
AppKey: ak,
AppSecret: sk,
}
requestBody := make(map[string]interface{})
requestBody["image"] = image
requestBody["url"] = url
requestBody["threshold"] = threshold
bytesData, err := json.Marshal(requestBody)
if err != nil {
return err.Error()
}
reader := bytes.NewBuffer(bytesData)
endpoint := GetEndpoint(core.MODERATION)
uri := "https://" + endpoint + core.IMAGE_CLARITY_DETECT
r, _ := http.NewRequest("POST", uri, reader)
r.Header.Add("content-type", "application/json")
s.Sign(r)
client := &http.Client{
Transport: &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
},
}
resp, err := client.Do(r)
if err != nil {
log.Println(err.Error())
return err.Error()
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
log.Println(err.Error())
}
return string(body)
}
<|start_filename|>go/demo.go<|end_filename|>
package main
import (
"ais_sdk/src/sdk"
"encoding/json"
"fmt"
)
func main() {
// Services currently support North China-Beijing 1 (cn-north-1), Asia Pacific-Hong Kong (ap-southeast-1)
sdk.InitRegion("cn-north-1")
ak := "*******" // your AppKey
sk := "******" // your AppSecret
// The sample for asr bgm service
// Test_BgmAkskDemo(ak, sk)
// The sample for asr sentence service
// Test_SentenceAkskDemo(ak, sk)
// The sample for image clarity detect service
// Test_ClarityDetectAkskDemo(ak, sk)
// The sample for image celebrity recognition service
// Test_CelebrityRecognitionAkskDemo(ak, sk)
// The sample for image distortion correct service
// Test_DistortionCorrectAkskDemo(ak, sk)
// The sample for image anti porn service
// Test_ImageAntiPornAkskDemo(ak, sk)
// The sample for image content detect service
// Test_ImageModerationAkskDemo(ak, sk)
// The sample for image content batch detect service
// Test_ImageModerationBatchAkskDemo(ak, sk)
// The sample for image defog service
// Test_ImageDefogAkskDemo(ak, sk)
// The sample for image tagging service
// Test_ImageTaggingAkskDemo(ak, sk)
// The sample for instrument recognition service
// Test_InstrumentAkskDemo(ak, sk)
// The sample for image dark enhance
// Test_DarkEnhanceAkskDemo(ak, sk)
// The sample for moderation text
// Test_ModerationTextAkskDemo(ak, sk)
// The sample for recapture detect
// Test_RecaptureDetectAkskDemo(ak, sk)
// The sample for super resolution
// Test_SuperResolutionAkskDemo(ak, sk)
// The sample for text to speech
// Test_TTSAkskDemo(ak, sk)
// The sample for long sentence
// Test_LongSentenceAkskDemo(ak, sk)
// The sample for moderation video
// Test_ModerationVideoAkskDemo(ak, sk)
// The sample for image content batch job detect service
Test_ImageModerationBatchJobAkskDemo(ak, sk)
}
func Test_InstrumentAkskDemo(ak string, sk string) {
// post data by url
url := "http://img.ikstatic.cn/MTU0NjQ2NzM1MTk0NCM5MzAjanBn.jpg"
var threshold float32 = 0.5
result := sdk.InstrumentAksk(ak, sk, "", url, threshold)
fmt.Println(result)
// post data by native file
filepath := "data/instrument.jpg"
image := sdk.ChangeFileToBase64(filepath)
result = sdk.InstrumentAksk(ak, sk, image, "", threshold)
fmt.Println(result)
}
func Test_BgmAkskDemo(ak string, sk string) {
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://obs-test-llg.obs.cn-north-1.myhuaweicloud.com/bgm_recognition"
result := sdk.AsrBgmAksk(ak, sk, url)
fmt.Println(result)
}
func Test_SentenceAkskDemo(ak string, sk string) {
encodeType := "wav"
sampleRate := "16k"
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://obs-ch-sdk-sample.obs.cn-north-1.myhuaweicloud.com/asr-sentence.wav"
result := sdk.AsrSentenceAksk(ak, sk, "", url, encodeType, sampleRate)
fmt.Println(result)
// post data by native file
filepath := "data/asr-sentence.wav"
data := sdk.ChangeFileToBase64(filepath)
result = sdk.AsrSentenceAksk(ak, sk, data, "", encodeType, sampleRate)
fmt.Println(result)
}
func Test_ClarityDetectAkskDemo(ak string, sk string) {
var threshold float32 = 0.8
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/vat-invoice.jpg"
result := sdk.ClarityDetectAksk(ak, sk, "", url, threshold)
fmt.Println(result)
// post data by native file
filepath := "data/moderation-clarity-detect.jpg"
image := sdk.ChangeFileToBase64(filepath)
result = sdk.ClarityDetectAksk(ak, sk, image, "", threshold)
fmt.Println(result)
}
func Test_CelebrityRecognitionAkskDemo(ak string, sk string) {
var threshold float32 = 0.48
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/celebrity-recognition.jpg"
result := sdk.CelebrityRecognitionAksk(ak, sk, "", url, threshold)
fmt.Println(result)
// post data by native file
filepath := "data/celebrity-recognition.jpg"
image := sdk.ChangeFileToBase64(filepath)
result = sdk.CelebrityRecognitionAksk(ak, sk, image, "", threshold)
fmt.Println(result)
}
func Test_DarkEnhanceAkskDemo(ak string, sk string) {
var brightness float32 = 0.9
var resultMap map[string]interface{}
filepath := "data/dark-enhance-demo.bmp"
image := sdk.ChangeFileToBase64(filepath)
result := sdk.DarkEnhanceAksk(ak, sk, image, brightness)
json.Unmarshal([]byte(result), &resultMap)
sdk.Base64ToFile("data/dark-enhance-demo-aksk.bmp", resultMap["result"].(string))
}
func Test_DistortionCorrectAkskDemo(ak string, sk string) {
var correction bool = true
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/vat-invoice.jpg"
result := sdk.DistortionCorrectAksk(ak, sk, "", url, correction)
fmt.Println(result)
// post data by native file
filepath := "data/modeation-distortion.jpg"
image := sdk.ChangeFileToBase64(filepath)
result = sdk.DistortionCorrectAksk(ak, sk, image, "", correction)
fmt.Println(result)
}
func Test_ImageAntiPornAkskDemo(ak string, sk string) {
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/antiporn.jpg"
result := sdk.ImageAntiPornAksk(ak, sk, "", url)
fmt.Println(result)
// post data by native file
filepath := "data/moderation-antiporn.jpg"
image := sdk.ChangeFileToBase64(filepath)
result = sdk.ImageAntiPornAksk(ak, sk, image, "")
fmt.Println(result)
}
func Test_ImageModerationAkskDemo(ak string, sk string) {
var categories = []string{"politics", "terrorism", "porn"}
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/terrorism.jpg"
result := sdk.ImageModerationAksk(ak, sk, "", url, categories)
fmt.Println(result)
// post data by native file
filepath := "data/moderation-terrorism.jpg"
image := sdk.ChangeFileToBase64(filepath)
result = sdk.ImageModerationAksk(ak, sk, image, "", categories)
fmt.Println(result)
}
func Test_ImageModerationBatchAkskDemo(ak string, sk string) {
var categories = []string{"politics", "terrorism", "porn"}
// The OBS link should match the region, and the OBS resources of different regions are not shared
url1 := "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/terrorism.jpg"
url2 := "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/antiporn.jpg"
var urls = []string{url1, url2}
result := sdk.ImageModerationBatchAksk(ak, sk, urls, categories)
fmt.Println(result)
}
func Test_ImageDefogAkskDemo(ak string, sk string) {
var resultMap map[string]interface{}
var naturalLook bool = true
var gamma float32 = 1.5
filepath := "data/defog-demo.png"
image := sdk.ChangeFileToBase64(filepath)
result := sdk.ImageDefogAksk(ak, sk, image, gamma, naturalLook)
json.Unmarshal([]byte(result), &resultMap)
sdk.Base64ToFile("data/defog-demo-aksk.png", resultMap["result"].(string))
}
func Test_ImageTaggingAkskDemo(ak string, sk string) {
var language string = "en"
var limit int = -1
var threshold float32 = 60.0
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://ais-sample-data.obs.myhuaweicloud.com/tagging-normal.jpg"
result := sdk.ImageTaggingAksk(ak, sk, "", url, language, limit, threshold)
fmt.Println(result)
// post data by native file
filepath := "data/image-tagging-demo.jpg"
image := sdk.ChangeFileToBase64(filepath)
result = sdk.ImageTaggingAksk(ak, sk, image, "", language, limit, threshold)
fmt.Println(result)
}
func Test_ModerationTextAkskDemo(ak string, sk string) {
var categories = []string{"ad", "politics", "flood", "politics", "contraband", "contraband"}
var text string = "666聊请+110亚砷酸钾六位qq,fuck666666666666666sssssssssss"
var types string = "content"
result := sdk.ModerationTextAksk(ak, sk, categories, text, types)
fmt.Println(result)
}
func Test_RecaptureDetectAkskDemo(ak string, sk string) {
var scene = []string{"recapture"}
var threshold float32 = 0.95
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://ais-sample-data.obs.myhuaweicloud.com/recapture-detect.jpg"
result := sdk.RecaptureDetectAksk(ak, sk, "", url, threshold, scene)
fmt.Println(result)
// post data by native file
filepath := "data/recapture-detect-demo.jpg"
image := sdk.ChangeFileToBase64(filepath)
result = sdk.RecaptureDetectAksk(ak, sk, image, "", threshold, scene)
fmt.Println(result)
}
func Test_SuperResolutionAkskDemo(ak string, sk string) {
var resultMap map[string]interface{}
var scale int = 3
var model string = "ESPCN"
filepath := "data/super-resolution-demo.png"
image := sdk.ChangeFileToBase64(filepath)
result := sdk.SuperResolutionAksk(ak, sk, image, scale, model)
json.Unmarshal([]byte(result), &resultMap)
sdk.Base64ToFile("data/super-resolution-demo-aksk.png", resultMap["result"].(string))
}
func Test_TTSAkskDemo(ak string, sk string) {
resultMap := make(map[string]map[string]string)
resultData := make(map[string]string)
var text string = "This is a test sample"
var voiceName string = "xiaoyan"
var volume int = 0
var sampleRate string = "16k"
var speechSpeed int = 0
var pitchRate int = 0
result := sdk.TtsAksk(ak, sk, text, voiceName, volume, sampleRate, speechSpeed, pitchRate)
json.Unmarshal([]byte(result), &resultMap)
resultData = resultMap["result"]
sdk.Base64ToFile("data/tts_token_sample.wav", resultData["data"])
}
func Test_LongSentenceAkskDemo(ak string, sk string) {
// post data by url
url := "https://ais-sample-data.obs.myhuaweicloud.com/lsr-1.mp3"
result := sdk.LongSentenceAksk(ak, sk, "", url)
fmt.Println(result)
// post data by native file
filepath := "data/asr-sentence.wav"
data := sdk.ChangeFileToBase64(filepath)
result = sdk.LongSentenceAksk(ak, sk, data, "")
fmt.Println(result)
}
func Test_ModerationVideoAkskDemo(ak string, sk string) {
var frameInterval int = 5
var categories = []string{"politics", "terrorism", "porn"}
// The OBS link should match the region, and the OBS resources of different regions are not shared
url := "https://obs-test-llg.obs.cn-north-1.myhuaweicloud.com/bgm_recognition"
result := sdk.ModerationVideoAksk(ak, sk, url, frameInterval, categories)
fmt.Println(result)
}
func Test_ImageModerationBatchJobAkskDemo(ak string, sk string) {
// The OBS link should match the region, and the OBS resources of different regions are not shared
var url1 string = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/terrorism.jpg"
var url2 string = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/antiporn.jpg"
var urls = []string{url1, url2}
var categories = []string{"politics", "terrorism", "porn"}
result := sdk.ImageModerationBatchJobsAksk(ak, sk, urls, categories)
fmt.Println(result)
}
<|start_filename|>javascript/tts_token_demo.js<|end_filename|>
/**
* Sample request to the text-to-speech service using token authentication
*/
var tts = require("./ais_sdk/tts");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
token.getToken(username, domain_name, password, function (token) {
tts.tts(token, "This is a test sample", "xiaoyan", 0, "16k", 0, 0, function (result) {
var resultObj = JSON.parse(result);
utils.getFileByBase64Str("./data/tts_token_sample.wav", resultObj.result.data);
})
});
<|start_filename|>c#/src/Models/tts.cs<|end_filename|>
using System;
using System.Net;
using System.Text;
using Newtonsoft.Json.Linq;
namespace Ais.Models
{
public class TTS
{
public static String TTSToken(String token, String text, String voice_name, int volume, String sample_rate, int speech_speed, int pitch_rate, String endpoint)
{
// request data for tts
JObject requestBody = new JObject();
requestBody.Add("text", text);
requestBody.Add("voice_name", voice_name);
requestBody.Add("volume", volume);
requestBody.Add("sample_rate", sample_rate);
requestBody.Add("speech_speed", speech_speed);
requestBody.Add("pitch_rate", pitch_rate); // previously unused parameter; field name assumed to mirror the other request fields
HttpWebRequest request = null;
String result = null;
String uri = new StringBuilder().Append("https://").Append(endpoint).Append(Ais.TTS).ToString();
String serviceName = System.Reflection.MethodBase.GetCurrentMethod().Name;
return utils.PostData(request, uri, token, requestBody, result, serviceName);
}
}
}
<|start_filename|>javascript/recapture_detect_aksk_demo.js<|end_filename|>
/**
* Sample request to the recapture detection service using AK/SK authentication
*/
var recapture = require("./ais_sdk/recapture_detect");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var app_key = "*************";
var app_secret = "************";
var filepath = "./data/recapture-detect-demo.jpg";
var data = utils.changeFileToBase64(filepath);
// The OBS URL must be in the same region as the service; OBS resources are not shared across regions
var demo_data_url = "https://ais-sample-data.obs.myhuaweicloud.com/recapture-detect.jpg";
recapture.recapture_detect_aksk(app_key, app_secret, data, "", 0.99, ['recapture'], function (result) {
console.log(result);
});
recapture.recapture_detect_aksk(app_key, app_secret, "", demo_data_url, 0.99, ['recapture'], function (result) {
console.log(result);
});
<|start_filename|>go/sdk/utils.go<|end_filename|>
package sdk
import (
"encoding/base64"
"io/ioutil"
"log"
"net/http"
"os"
)
var ENDPOINTS = make(map[string]map[string]string)
var region = "cn-north-1"
func init() {
var imageMap = make(map[string]string)
var moderationMap = make(map[string]string)
imageMap["cn-north-1"] = "image.cn-north-1.myhuaweicloud.com"
imageMap["ap-southeast-1"] = "image.ap-southeast-1.myhuaweicloud.com"
moderationMap["cn-north-1"] = "moderation.cn-north-1.myhuaweicloud.com"
moderationMap["ap-southeast-1"] = "moderation.ap-southeast-1.myhuaweicloud.com"
ENDPOINTS["image"] = imageMap
ENDPOINTS["moderation"] = moderationMap
}
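// ChangeFileToBase64 reads a local file and returns its contents as a base64-encoded string.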
func ChangeFileToBase64(filepath string) string {
ff, _ := os.Open(filepath)
defer ff.Close()
imgByte, _ := ioutil.ReadAll(ff)
encodeString := base64.StdEncoding.EncodeToString(imgByte)
return encodeString
}
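// DownFileByUrl downloads the resource at the given url and returns it base64-encoded, or "" on error.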
func DownFileByUrl(url string) string {
res, err := http.Get(url)
if err != nil {
log.Println(err.Error())
return ""
}
defer res.Body.Close()
imgByte, _ := ioutil.ReadAll(res.Body)
encodeString := base64.StdEncoding.EncodeToString(imgByte)
return encodeString
}
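// Base64ToFile decodes a base64 string and writes the resulting bytes to the given file path.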
func Base64ToFile(filePath string, base64Str string) {
buffer, _ := base64.StdEncoding.DecodeString(base64Str)
err := ioutil.WriteFile(filePath, buffer, 0666)
if err != nil {
log.Println(err.Error())
}
}
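// IsOkResponse reports whether the http status code indicates success.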
func IsOkResponse(statusCode int) bool {
return statusCode >= 200 && statusCode < 300
}
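// InitRegion sets the region used to resolve service endpoints.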
func InitRegion(regionName string) {
region = regionName
}
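// GetEndpoint returns the endpoint of the given service type in the configured region.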
func GetEndpoint(typeName string) string {
return ENDPOINTS[typeName][region]
}
<|start_filename|>javascript/image_defog_token_demo.js<|end_filename|>
/**
* Sample request to the image defogging service using token authentication
*/
var defog = require("./ais_sdk/image_defog");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var username = "*************"; // username
var domain_name = "*************"; // domain name
var password = "*************"; // password
var filepath = "./data/defog-demo.png";
var data = utils.changeFileToBase64(filepath);
token.getToken(username, domain_name, password, function (token) {
defog.defog(token, data, 1.5, true, function (result) {
var resultObj = JSON.parse(result);
utils.getFileByBase64Str("./data/defog-demo-token.png", resultObj.result);
})
});
<|start_filename|>javascript/super_resolution_token_demo.js<|end_filename|>
/**
* Sample request to the super resolution service using token authentication
*/
var supresol = require("./ais_sdk/super_resolution");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
var filepath = "./data/super-resolution-demo.png";
var data = utils.changeFileToBase64(filepath);
/**
* Obtain the result using token authentication
*/
token.getToken(username, domain_name, password, function (token) {
supresol.super_resolution(token, data, 3, "ESPCN", function (result) {
var resultObj = JSON.parse(result);
utils.getFileByBase64Str("./data/super-resolution-demo-token.png", resultObj.result);
})
});
<|start_filename|>javascript/long_sentence_aksk_demo.js<|end_filename|>
/**
* Sample request to the long-form speech recognition service using AK/SK authentication
*/
var lsen = require("./ais_sdk/long_sentence");
var utils = require("./ais_sdk/utils");
var app_key = "*************";
var app_secret = "************";
var demo_data_url = "https://obs-ch-sdk-sample.obs.cn-north-1.myhwclouds.com/lsr-1.mp3";
var filepath = "./data/asr-sentence.wav";
var data = utils.changeFileToBase64(filepath);
lsen.long_sentence_aksk(app_key, app_secret, "", demo_data_url, "", function (result) {
console.log(result)
});
lsen.long_sentence_aksk(app_key, app_secret, data, "", "", function (result) {
console.log(result)
});
<|start_filename|>javascript/asr_sentence_token_demo.js<|end_filename|>
/**
* Sample request to the speech recognition service using token authentication
*/
var sentence = require("./ais_sdk/asr_sentence");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
var filepath = "./data/asr-sentence.wav";
var data = utils.changeFileToBase64(filepath);
var obsUrl = "https://obs-ch-sdk-sample.obs.cn-north-1.myhwclouds.com/asr-sentence.wav";
token.getToken(username, domain_name, password, function (token) {
sentence.asr_scentence(token, data, "", "wav", "16k", function (result) {
console.log(result);
});
sentence.asr_scentence(token, "", obsUrl, "wav", "16k", function (result) {
console.log(result.toString());
})
});
<|start_filename|>javascript/moderation_text_aksk_demo.js<|end_filename|>
/**
* Sample request to the text moderation service using AK/SK authentication
*/
var text = require("./ais_sdk/moderation_text");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var app_key = "*************";
var app_secret = "************";
text.moderation_text_aksk(app_key, app_secret, [{
"text": "666666luo聊请+110亚砷酸钾六位qq,fuck666666666666666",
"type": "content"
}], ["ad", "abuse", "politics", "porn", "contraband"], function (result) {
console.log(result);
});
<|start_filename|>java/ais-image-java-sdk/src/main/java/com/huawei/ais/demo/ServiceAccessBuilder.java<|end_filename|>
package com.huawei.ais.demo;
import com.huawei.ais.common.AuthInfo;
import com.huawei.ais.common.ProxyHostInfo;
import com.huawei.ais.sdk.AisAccess;
import com.huawei.ais.sdk.AisAccessWithProxy;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Builder for the service access entry point, used to initialize the common settings of a Service Access object:
*
* the Endpoint, the Region, the Access key / Secret access key,
* and the timeout parameters for http requests
*
*/
public class ServiceAccessBuilder {
private static Map<String, String> endpointMap = new ConcurrentHashMap<>();
static {
/* The regions and endpoints of the image recognition service can be looked up at
* http://developer.huaweicloud.com/dev/endpoint
* */
endpointMap.put("cn-north-1", "https://image.cn-north-1.myhuaweicloud.com");
endpointMap.put("ap-southeast-1", "https://image.ap-southeast-1.myhuaweicloud.com");
}
private String region;
private String endpoint;
private String ak;
private String sk;
private ProxyHostInfo proxy = null;
private int connectionTimeout = 5000;
private int connectionRequestTimeout = 1000;
private int socketTimeout = 5000;
public static ServiceAccessBuilder builder() {
return new ServiceAccessBuilder();
}
public AisAccess build() {
if (proxy == null) {
return new AisAccess(new AuthInfo(endpoint, region, ak, sk), connectionTimeout,connectionRequestTimeout,socketTimeout);
} else {
return new AisAccessWithProxy(new AuthInfo(endpoint, region, ak, sk), proxy, connectionTimeout,connectionRequestTimeout, socketTimeout);
}
}
public ServiceAccessBuilder ak(String ak) {
this.ak = ak;
return this;
}
public ServiceAccessBuilder sk(String sk) {
this.sk = sk;
return this;
}
public ServiceAccessBuilder region(String region) {
this.region = region;
this.endpoint = getCurrentEndpoint(region);
return this;
}
public ServiceAccessBuilder proxy(ProxyHostInfo proxy) {
this.proxy = proxy;
return this;
}
public ServiceAccessBuilder connectionTimeout(int connectionTimeout) {
this.connectionTimeout = connectionTimeout;
return this;
}
public ServiceAccessBuilder connectionRequestTimeout(int connectionRequestTimeout) {
this.connectionRequestTimeout = connectionRequestTimeout;
return this;
}
public ServiceAccessBuilder socketTimeout(int socketTimeout) {
this.socketTimeout = socketTimeout;
return this;
}
/**
* Proxy host configuration used when accessing the network through a proxy
*/
public static ProxyHostInfo getProxyHost() {
return new ProxyHostInfo("proxycn2.***.com", /* proxy host */
8080, /* proxy port */
"china/***", /* proxy username */
"***" /* password of the proxy user */
);
}
/**
* Returns the service endpoint for the given region
*/
public static String getCurrentEndpoint(String region){
return endpointMap.get(region);
}
}
<|start_filename|>javascript/asr_bgm_token_demo.js<|end_filename|>
/**
* Sample request to the background music (bgm) recognition service using token authentication
*/
var bgm = require("./ais_sdk/asr_bgm");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
// The OBS URL must be in the same region as the service; OBS resources are not shared across regions
var obsUrl = "https://obs-test-llg.obs.cn-north-1.myhuaweicloud.com/bgm_recognition";
token.getToken(username, domain_name, password, function (token) {
bgm.asr_bgm(token, obsUrl, function (result) {
console.log(result);
});
});
<|start_filename|>c#/demo/AisServiceSample.cs<|end_filename|>
using System;
using Ais.Models;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace AisDemo
{
class AisServiceSample
{
static void Main(string[] args)
{
// Services currently support North China-Beijing 1 (cn-north-1), Asia Pacific-Hong Kong (ap-southeast-1)
String regionName = "cn-north-1";
String username = "*******";
String password = "*******";
String domainName = "*******";
// domain names for image service
String IMAGE_ENDPOINT = Image.getEndponit(regionName);
// domain names for moderation service
String MODERATION_ENDPOINT = Moderation.getEndponit(regionName);
// domain names for tts service
String TTS_ENDPOINT = "sis.cn-north-1.myhuaweicloud.com";
// domain names for asr service
String ASR_ENDPOINT = "sis.cn-north-1.myhuaweicloud.com";
// domain name for obtaining an IAM token
String IAM_ENDPOINT = "iam.cn-north-1.myhuaweicloud.com";
String token = Authentication.GetToken(username, domainName, password, regionName, IAM_ENDPOINT);
// image tagging service example
ImageTagging(token, IMAGE_ENDPOINT);
// asr bgm service example
AsrBgm(token, IMAGE_ENDPOINT);
// celebrity recognition service example
CelebrityRecognition(token, IMAGE_ENDPOINT);
// dark enhance service example
DarkEnhance(token, IMAGE_ENDPOINT);
// image defog detect service example
ImageDefog(token, IMAGE_ENDPOINT);
// image recapture detect service example
RecaptureDetect(token, IMAGE_ENDPOINT);
// image super resolution service example
SuperResolution(token, IMAGE_ENDPOINT);
// distortion correction service example
DistortionCorrect(token, MODERATION_ENDPOINT);
// clarity detect service example
ClarityDetect(token, MODERATION_ENDPOINT);
// image anti porn service example
AntiPorn(token, MODERATION_ENDPOINT);
// image content detect service example
ImageContent(token, MODERATION_ENDPOINT);
// moderation text detect service example
ModerationText(token, MODERATION_ENDPOINT);
// moderation video service example
ModerationVideo(token, MODERATION_ENDPOINT);
// image content batch jobs service example
ImageContentBatchJobs(token, MODERATION_ENDPOINT);
// image content batch service example
ImageContentBatch(token, MODERATION_ENDPOINT);
// asr sentence service example
AsrSentence(token, ASR_ENDPOINT);
// long sentence service example
LongSentence(token, ASR_ENDPOINT);
// text to speech service example
Tts(token, TTS_ENDPOINT);
}
private static void ImageTagging(String token, String endpoint)
{
// The obs url of file
String dataUrl = "";
// The confidence interval
float threshold = 60;
// The tagging language
String language = "en";
// The tagging amount limit of return
int limit = 5;
// post data by native file
String image = utils.ConvertFileToBase64("../../data/image-tagging-demo.jpg");
String result = Image.ImageTaggingToken(token, image, dataUrl, threshold, language, limit, endpoint);
Console.WriteLine(result);
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://ais-sample-data.obs.myhuaweicloud.com/tagging-normal.jpg";
// post data by obs url
result = Image.ImageTaggingToken(token, "", dataUrl, 60, "en", 5, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void AsrBgm(String token, String endpoint)
{
// The OBS link must match the region, and the OBS resources of different regions are not shared
String dataUrl = "https://obs-test-llg.obs.cn-north-1.myhuaweicloud.com/bgm_recognition";
// post data by obs url
String result = Asr.AsrBgmToken(token, dataUrl, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void AsrSentence(String token, String endpoint)
{
// sentence flie type
String encode_type = "wav";
// sampling rate of speech,just adapt to the file suffix is ".wav"
String sample_rate = "16k";
// the obs url
String dataUrl = "";
// post data by native file
String data = utils.ConvertFileToBase64("../../data/asr-sentence.wav");
String result = Asr.AsrSentenceToken(token, data, dataUrl, encode_type, sample_rate, endpoint);
Console.WriteLine(result);
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://obs-ch-sdk-sample.obs.cn-north-1.myhwclouds.com/asr-sentence.wav";
// post data by obs url
result = Asr.AsrSentenceToken(token, "", dataUrl, encode_type, sample_rate, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void CelebrityRecognition(String token, String endpoint)
{
// The obs url of file
String dataUrl = "";
// The confidence interval,default 0.48f (0-1)
float threshold = 0.48f;
// post data by native file
String data = utils.ConvertFileToBase64("../../data/celebrity-recognition.jpg");
String result = Image.CelebrityRecognitionToken(token, data, dataUrl, threshold, endpoint);
Console.WriteLine(result);
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/celebrity-recognition.jpg";
// post data by obs url
result = Image.CelebrityRecognitionToken(token, "", dataUrl, threshold, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void ClarityDetect(String token, String endpoint)
{
// The OBS URL of the file
String dataUrl = "";
// The clarity confidence threshold, default 0.8f
float threshold = 0.8f;
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/moderation-clarity-detect.jpg");
String result = Moderation.ClarityDetectToken(token, data, dataUrl, threshold, endpoint);
Console.WriteLine(result);
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/vat-invoice.jpg";
// post data via OBS URL
result = Moderation.ClarityDetectToken(token, "", dataUrl, threshold, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void DarkEnhance(String token, String endpoint)
{
// The brightness adjustment factor, default 0.9f
float brightness = 0.9f;
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/dark-enhance-demo.bmp");
String result = Image.DarkEnhanceToken(token, data, brightness, endpoint);
JObject joResult = (JObject)JsonConvert.DeserializeObject(result);
String filePath = utils.Base64ToFileAndSave(joResult["result"].ToString(), @"../../data/dark-enhance-token.bmp");
Console.WriteLine(filePath);
Console.ReadKey();
}
private static void DistortionCorrect(String token, String endpoint)
{
// The OBS URL of the file
String dataUrl = "";
// Whether to correct distortion or not
bool correction = false;
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/modeation-distortion.jpg");
String result = Moderation.DistortionCorrectToken(token, data, dataUrl, correction, endpoint);
JObject joResult = (JObject)JsonConvert.DeserializeObject(result);
if (joResult["result"]["data"].ToString() != "")
{
String resultPath = @"../../data/modeation-distortion-token-1.bmp";
resultPath = utils.Base64ToFileAndSave(joResult["result"]["data"].ToString(), resultPath);
Console.WriteLine(resultPath);
}
else
{
Console.WriteLine(result);
}
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/vat-invoice.jpg";
// post data via OBS URL
result = Moderation.DistortionCorrectToken(token, "", dataUrl, correction, endpoint);
joResult = (JObject)JsonConvert.DeserializeObject(result);
if (joResult["result"]["data"].ToString() != "")
{
String resultPath = @"../../data/modeation-distortion-token-2.bmp";
resultPath = utils.Base64ToFileAndSave(joResult["result"]["data"].ToString(), resultPath);
Console.WriteLine(resultPath);
}
else
{
Console.WriteLine(result);
}
Console.ReadKey();
}
private static void AntiPorn(String token, String endpoint)
{
// The OBS URL of the file
String dataUrl = "";
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/moderation-antiporn.jpg");
String result = Moderation.AntiPornToken(token, data, dataUrl, endpoint);
Console.WriteLine(result);
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/antiporn.jpg";
// post data via OBS URL
result = Moderation.AntiPornToken(token, "", dataUrl, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void ImageContent(String token, String endpoint)
{
// The OBS URL of the file
String dataUrl = "";
// The confidence threshold; defaults are 0.48f for "politics" and 0 for "terrorism"
float threshold = 0.6f;
JArray categories = new JArray();
categories.Add("politics");
categories.Add("terrorism");
categories.Add("porn");
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/moderation-terrorism.jpg");
String result = Moderation.ImageContentToken(token, data, dataUrl, threshold, categories, endpoint);
Console.WriteLine(result);
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/terrorism.jpg";
// post data via OBS URL
result = Moderation.ImageContentToken(token, "", dataUrl, threshold, categories, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void ImageContentBatch(String token, String endpoint)
{
// The OBS link must match the region, and the OBS resources of different regions are not shared
String dataUrl1 = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/terrorism.jpg";
String dataUrl2 = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/antiporn.jpg";
JArray urls = new JArray();
urls.Add(dataUrl1);
urls.Add(dataUrl2);
float threshold = 0.6f;
JArray categories = new JArray();
categories.Add("politics");
categories.Add("terrorism");
categories.Add("porn");
String result = Moderation.ImageContentBatchToken(token, urls, threshold, categories, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void ImageContentBatchJobs(String token, String endpoint)
{
// The OBS link must match the region, and the OBS resources of different regions are not shared
String dataUrl1 = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/terrorism.jpg";
String dataUrl2 = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/antiporn.jpg";
JArray urls = new JArray();
urls.Add(dataUrl1);
urls.Add(dataUrl2);
JArray categories = new JArray();
categories.Add("politics");
categories.Add("terrorism");
categories.Add("porn");
String result = Moderation.ImageContentBatchJobsToken(token, urls, categories, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void ImageDefog(String token, String endpoint)
{
// Whether to produce a natural-looking result
bool natural_look = true;
// The gamma correction value, default 1.5, range [0.1, 10]
float gamma = 1.5f;
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/defog-demo.png");
String result = Image.ImageDefogToken(token, data, gamma, natural_look, endpoint);
JObject joResult = (JObject)JsonConvert.DeserializeObject(result);
String resultPath = utils.Base64ToFileAndSave(joResult["result"].ToString(), @"../../data/defog-demo-token.png");
Console.WriteLine(resultPath);
Console.ReadKey();
}
private static void ModerationText(String token, String endpoint)
{
JArray categories = new JArray();
categories.Add("politics");
categories.Add("porn");
categories.Add("contraband");
categories.Add("ad");
JArray items = new JArray();
JObject content = new JObject();
content.Add("text", "666666luo聊请+110亚砷酸钾六位qq,fuck666666666666666");
content.Add("type", "content");
items.Add(content);
String result = Moderation.ModerationTextToken(token, categories, items, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void RecaptureDetect(String token, String endpoint)
{
// The OBS URL of the file
String dataUrl = "";
// The confidence threshold, default 0.95f, range [0, 1]
float threshold = 0.95f;
// The scene of recapture detection
JArray scene = new JArray();
scene.Add("recapture");
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/recapture-detect-demo.jpg");
String result = Image.RecaptureDetectToken(token, data, dataUrl, threshold, scene, endpoint);
Console.WriteLine(result);
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://ais-sample-data.obs.myhuaweicloud.com/recapture-detect.jpg";
// post data via OBS URL
result = Image.RecaptureDetectToken(token, "", dataUrl, threshold, scene, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void SuperResolution(String token, String endpoint)
{
int scale = 3; // The magnification scale, default 3; only 3 or 4 are supported
String model = "ESPCN"; // The algorithm model, default ESPCN
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/super-resolution-demo.png");
String result = Image.SuperResolutionToken(token, data, scale, model, endpoint);
JObject joResult = (JObject)JsonConvert.DeserializeObject(result);
String filePath = utils.Base64ToFileAndSave(joResult["result"].ToString(), @"../../data/super-resolution-token.png");
Console.WriteLine(filePath);
Console.ReadKey();
}
private static void Tts(String token, String endpoint)
{
// Text to synthesize
String text = "This is a test sample";
// The voice name for voice output
String voice_name = "xiaoyan";
// The volume for voice output, range [-20, 20], default 0
int volume = 0;
// The sample rate
String sample_rate = "16k";
// The speech speed, range [-500, 500], default 0
int speech_speed = 0;
// The pitch rate, range [-500, 500], default 0
int pitch_rate = 0;
// synthesize the text and save the returned audio
String result = TTS.TTSToken(token, text, voice_name, volume, sample_rate, speech_speed, pitch_rate, endpoint);
JObject joResult = (JObject)JsonConvert.DeserializeObject(result);
String filePath = utils.Base64ToFileAndSave(joResult["result"]["data"].ToString(), @"../../data/tts_token_sample.wav");
Console.WriteLine(filePath);
Console.ReadKey();
}
private static void ModerationVideo(String token, String endpoint)
{
JArray categories = new JArray();
categories.Add("terrorism");
categories.Add("porn");
categories.Add("politics");
// The OBS link must match the region, and the OBS resources of different regions are not shared
String url = "https://obs-test-llg.obs.cn-north-1.myhuaweicloud.com/bgm_recognition";
// Frame sampling interval
int frame_interval = 5;
String result = Moderation.VideoToken(token, url, frame_interval, categories, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
private static void LongSentence(String token, String endpoint)
{
// The OBS URL of the file
String dataUrl = "";
// The detection scene
String categories = "common";
// post data from a local file
String data = utils.ConvertFileToBase64("../../data/asr-sentence.wav");
String result = Asr.LongSentenceToken(token, data, dataUrl, categories, endpoint);
Console.WriteLine(result);
// The OBS link must match the region, and the OBS resources of different regions are not shared
dataUrl = "https://obs-ch-sdk-sample.obs.cn-north-1.myhwclouds.com/lsr-1.mp3";
// post data via OBS URL
result = Asr.LongSentenceToken(token, "", dataUrl, categories, endpoint);
Console.WriteLine(result);
Console.ReadKey();
}
}
}
<|start_filename|>java/ais-image-java-sdk/src/main/java/com/huawei/ais/demo/deblur/DarkEnhanceDemo.java<|end_filename|>
package com.huawei.ais.demo.deblur;
import java.io.File;
import java.io.IOException;
import com.huawei.ais.demo.ServiceAccessBuilder;
import com.huawei.ais.sdk.AisAccessWithProxy;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.FileUtils;
import org.apache.http.HttpResponse;
import com.alibaba.fastjson.JSONObject;
import com.huawei.ais.demo.ResponseProcessUtils;
import com.huawei.ais.sdk.AisAccess;
/**
 * Sample class demonstrating the dark (low-light) enhancement service
 */
public class DarkEnhanceDemo {
//
// Sample function demonstrating the dark enhancement service
//
private static void darkEnhanceDemo() throws IOException {
// 1. Configure the basic information of the dark enhancement service and build a client connection object
AisAccess service = ServiceAccessBuilder.builder()
.ak("######") // your ak
.sk("######") // your sk
.region("cn-north-1") // The image recognition service currently supports CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
.connectionTimeout(5000) // Timeout for connecting to the target URL
.connectionRequestTimeout(1000) // Timeout for obtaining an available connection from the pool
.socketTimeout(20000) // Timeout for receiving the server response
.build();
try {
//
// 2. Build the parameters required by the dark enhancement service
//
String uri = "/v1.0/vision/dark-enhance";
byte[] fileData = FileUtils.readFileToByteArray(new File("data/dark-enhance-demo-1.bmp"));
String fileBase64Str = Base64.encodeBase64String(fileData);
JSONObject json = new JSONObject();
json.put("image", fileBase64Str);
json.put("brightness", 0.9);
// 3. Pass the URI of the dark enhancement service together with its parameters,
// mainly as a JSON object, and call the service with the POST method
HttpResponse response = service.post(uri, json.toJSONString());
// 4. Check the status returned by the call: 200 means success, anything else is a failure.
ResponseProcessUtils.processResponseStatus(response);
// 5. Process the returned byte stream and save the enhanced image to a file.
ResponseProcessUtils.processResponseWithImage(response, "data/dark-enhance-demo-1.cooked.bmp");
} catch (Exception e) {
e.printStackTrace();
} finally {
// 6. When finished, close the client connection to the service
service.close();
}
}
//
// Main entry point
//
public static void main(String[] args) throws IOException {
// Run the demo
darkEnhanceDemo();
}
}
<|start_filename|>javascript/image_moderation_token_demo.js<|end_filename|>
/**
 * Example of calling the image content moderation service with token authentication
 */
var content = require("./ais_sdk/image_moderation");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
var filepath = "./data/moderation-terrorism.jpg";
var data = utils.changeFileToBase64(filepath);
// The OBS link must match the region; OBS resources of different regions are not shared
var demo_data_url = "https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/terrorism.jpg";
token.getToken(username, domain_name, password, function (token) {
content.image_content(token, data, "", ["politics"], "", function (result) {
console.log(result);
});
content.image_content(token, "", demo_data_url, ["politics"], "", function (result) {
console.log(result);
})
});
<|start_filename|>java/ais-image-java-sdk/src/main/java/com/huawei/ais/demo/image/ASRBgmDemo.java<|end_filename|>
package com.huawei.ais.demo.image;
import java.io.IOException;
import com.huawei.ais.demo.ServiceAccessBuilder;
import org.apache.http.HttpResponse;
import com.alibaba.fastjson.JSONObject;
import com.huawei.ais.demo.ResponseProcessUtils;
import com.huawei.ais.sdk.AisAccess;
/**
 * Sample class demonstrating the video background music (BGM) recognition service
 */
public class ASRBgmDemo {
//
// Sample function demonstrating the BGM recognition service
//
private static void asrBgmDemo() throws IOException {
// 1. Configure the basic information of the BGM recognition service and build a client connection object
AisAccess service = ServiceAccessBuilder.builder()
.ak("######") // your ak
.sk("######") // your sk
.region("cn-north-1") // The image recognition service currently supports CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
.connectionTimeout(5000) // Timeout for connecting to the target URL
.connectionRequestTimeout(1000) // Timeout for obtaining an available connection from the pool
.socketTimeout(20000) // Timeout for receiving the server response
.build();
try {
//
// 2. Build the parameters required by the BGM recognition service
//
String uri = "/v1.0/bgm/recognition";
JSONObject json = new JSONObject();
// The OBS URL of the video (note: the OBS link must match the region; OBS resources of different regions are not shared)
String url = "https://obs-test-llg.obs.cn-north-1.myhuaweicloud.com/bgm_recognition";
json.put("url", url);
// 3. Pass the URI of the BGM recognition service together with its parameters,
// mainly as a JSON object, and call the service with the POST method
HttpResponse response = service.post(uri, json.toJSONString());
// 4. Check the status returned by the call: 200 means success, anything else is a failure.
ResponseProcessUtils.processResponseStatus(response);
// 5. Process the returned stream and print the recognition result.
ResponseProcessUtils.processResponse(response);
} catch (Exception e) {
e.printStackTrace();
} finally {
// 6. When finished, close the client connection to the service
service.close();
}
}
//
// Main entry point
//
public static void main(String[] args) throws IOException {
// Run the demo
asrBgmDemo();
}
}
<|start_filename|>javascript/image_tagging_aksk_demo.js<|end_filename|>
/**
 * Example of calling the image tagging service with AK/SK authentication
 */
var tagging = require("./ais_sdk/image_tagging");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var app_key = "*************";
var app_secret = "************";
var filepath = "./data/image-tagging-demo.jpg";
var data = utils.changeFileToBase64(filepath);
// The OBS link must match the region; OBS resources of different regions are not shared
var demo_data_url = "https://ais-sample-data.obs.myhuaweicloud.com/tagging-normal.jpg";
tagging.image_tagging_aksk(app_key, app_secret, data, "", 60, "en", 5, function (result) {
console.log(result);
});
tagging.image_tagging_aksk(app_key, app_secret, "", demo_data_url, 60, "en", 5, function (result) {
console.log(result);
});
<|start_filename|>javascript/image_antiporn_token_demo.js<|end_filename|>
/**
 * Example of calling the image anti-porn service with token authentication
 */
var antiporn = require("./ais_sdk/image_antiporn");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
var filepath = "./data/moderation-antiporn.jpg";
var data = utils.changeFileToBase64(filepath);
var demo_data_url = 'https://ais-sample-data.obs.cn-north-1.myhuaweicloud.com/antiporn.jpg';
token.getToken(username, domain_name, password, function (token) {
antiporn.image_antiporn(token, data, "", function (result) {
console.log(result);
});
antiporn.image_antiporn(token, "", demo_data_url, function (result) {
console.log(result);
})
});
<|start_filename|>javascript/image_tagging_token_demo.js<|end_filename|>
/**
 * Example of calling the image tagging service with token authentication
 */
var tagging = require("./ais_sdk/image_tagging");
var token = require("./ais_sdk/gettoken");
var utils = require("./ais_sdk/utils");
// Initialize the service region; currently supported regions include CN North-Beijing1 (cn-north-1) and AP-Hong Kong (ap-southeast-1)
utils.initRegion("cn-north-1");
var username = "*******"; // username
var domain_name = "*******"; // domain name
var password = "*******"; // password
var filepath = "./data/image-tagging-demo.jpg";
var data = utils.changeFileToBase64(filepath);
// The OBS link must match the region; OBS resources of different regions are not shared
var demo_data_url = "https://ais-sample-data.obs.myhuaweicloud.com/tagging-normal.jpg";
token.getToken(username, domain_name, password, function (token) {
tagging.image_tagging(token, data, "", 60, "en", 5, function (result) {
console.log(result);
});
tagging.image_tagging(token, "", demo_data_url, 60, "en", 5, function (result) {
console.log(result);
})
}); | huaweicloudsdk/ais-sdk |
<|start_filename|>src/gamePlayer/view/ribbon.css<|end_filename|>
#loadButton {
-fx-background-image: url("loadButton.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 30 30;
}
#exitButton {
-fx-background-image: url("exitButton.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 30 30;
}
#loadWebButton {
-fx-background-image: url("loadWebButton.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 30 30;
}
#shareWebButton {
-fx-background-image: url("shareWebButton.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 30 30;
}
#pausePlayButton {
-fx-background-image: url("pausePlayButton.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 30 30;
}
#keyboardButton {
-fx-background-image: url("keyboardButton.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 30 30;
}
#saveGameProgressButton {
-fx-background-image: url("saveButton.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 30 30;
}
#loadGameProgressButton {
-fx-background-image: url("loadButton.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 30 30;
}
#container {
-fx-background-size: 1000 100;
-fx-padding: 3px;
-fx-background-color:
linear-gradient(#ffffff 0%, #ced3da 4%, #c1c6cf 17%,
#b4bbc5 18%, #e5ebeb 100%);
}
#tooltip {
-fx-background-radius: 2 2 2 2;
}
#featuredGame1 {
-fx-background-image: url("sampleGame1.png");
-fx-background-repeat: no-repeat;
-fx-background-position: center;
-fx-background-size: 100 50;
} | kevinli194/voogasalad_BitsPlease |
<|start_filename|>js/lib/util/add_shebang.js<|end_filename|>
#!/usr/bin/env node
var fs = require('fs');
var path = "dist/cli.js";
var data = "#!/usr/bin/env node\n\n";
data += fs.readFileSync(path);
fs.writeFileSync(path, data);
<|start_filename|>js/lib/rollup.config.js<|end_filename|>
import rust from "@wasm-tool/rollup-plugin-rust";
import typescript from "rollup-plugin-typescript2";
import { terser } from "rollup-plugin-terser";
export default {
input: {
index: "src/index.ts",
},
output: {
sourcemap: false,
name: "taplo",
format: "umd",
dir: "dist",
},
plugins: [
rust({
debug: process.env["RELEASE"] !== "true",
nodejs: true,
inlineWasm: process.env["SEPARATE_WASM"] !== "true",
cargoArgs: ["--features", "serde,schema,verify"]
}),
typescript(),
terser()
],
};
<|start_filename|>js/lib/package.json<|end_filename|>
{
"name": "@taplo/lib",
"version": "0.2.1",
"description": "A TOML linter and formatter and utility library.",
"types": "dist/index.d.ts",
"scripts": {
"build": "yarn rollup --silent -c rollup.config.js"
},
"homepage": "https://taplo.tamasfe.dev",
"main": "dist/index.js",
"repository": "https://github.com/tamasfe/taplo",
"author": "tamasfe",
"license": "MIT",
"private": false,
"dependencies": {},
"devDependencies": {
"@wasm-tool/rollup-plugin-rust": "^1.0.5",
"rollup": "^2.33.1",
"rollup-plugin-terser": "^7.0.2",
"rollup-plugin-typescript2": "^0.29.0",
"tslib": "^2.0.3",
"typedoc": "^0.19.2",
"typedoc-plugin-markdown": "^3.0.11",
"typescript": "^4.0.5"
}
}
<|start_filename|>site/gatsby-config.js<|end_filename|>
const remarkSlug = require("remark-slug");
module.exports = {
siteMetadata: {
title: "Taplo",
siteUrl: "https://taplo.tamasfe.dev",
},
// flags: { DEV_SSR: true },
plugins: [
{
resolve: `gatsby-plugin-sass`,
options: {
implementation: require("sass"),
},
},
"gatsby-plugin-sharp",
"gatsby-plugin-react-helmet",
"gatsby-plugin-sitemap",
{
resolve: "gatsby-plugin-manifest",
options: {
icon: "src/assets/taplo-icon.svg",
},
},
{
resolve: `gatsby-plugin-mdx`,
options: {
defaultLayouts: {
default: require.resolve("./src/components/doc-page-layout.tsx"),
},
remarkPlugins: [remarkSlug],
},
},
"gatsby-transformer-sharp",
{
resolve: "gatsby-source-filesystem",
options: {
name: "pages",
path: "./src/pages/",
},
__key: "pages",
},
{
resolve: "gatsby-source-filesystem",
options: {
name: "schemas",
path: "./static/schemas",
},
__key: "schemas",
},
"gatsby-transformer-json",
{
resolve: "gatsby-plugin-antd",
options: {
style: true,
},
},
{
resolve: `gatsby-plugin-less`,
options: {
lessOptions: {
modifyVars: {
"primary-color": "#de591b",
},
javascriptEnabled: true,
},
},
},
{
resolve: `gatsby-plugin-typegen`,
},
],
};
<|start_filename|>site/static/schema_index.json<|end_filename|>
{"schemas":[{"title":"Cargo.toml","description":"A schema for Cargo.toml.","updated":"2020-12-25T18:39:37+00:00","url":"https://taplo.tamasfe.dev/schemas/cargo.toml.json","urlHash":"842614c23c7a7c4d872d71ec1a207277f8126d32f5336bda93e715f192886da2","authors":["tamasfe (https://github.com/tamasfe)"],"patterns":["^(.*(/|\\\\)Cargo\\.toml|Cargo\\.toml)$"]},{"title":"pyproject.toml","updated":"2020-12-25T18:39:37+00:00","url":"https://taplo.tamasfe.dev/schemas/pyproject.toml.json","urlHash":"d3bc40f82541aaaa1e61a27bccc8c0bd70192f44978696e3f05d9bffd2fc704a","authors":["zevisert (https://github.com/zevisert)"],"patterns":["^(.*(/|\\\\)pyproject\\.toml|pyproject\\.toml)$"]},{"title":"rustfmt schema","description":"https://rust-lang.github.io/rustfmt","updated":"2020-12-25T18:39:37+00:00","url":"https://taplo.tamasfe.dev/schemas/rustfmt.toml.json","urlHash":"7b54a6432b1b59cd3755d9e20e8cee5852e007763af5ea7e94311efd50539af4","authors":["Aloso (https://github.com/Aloso)"],"patterns":["^(.*(/|\\\\)rustfmt\\.toml|rustfmt\\.toml)$"]},{"title":"golangci-lint Configuration","description":"golangci-lint configuration file","updated":"2020-12-28T20:24:44+00:00","url":"https://json.schemastore.org/golangci-lint","urlHash":"b22f5f71232a8b374c78a2696073d72db11aa411bc48bf0920b483acbc8aaf23","authors":["automatically included from https://schemastore.org"],"patterns":["^(.*(/|\\)\\.golangci\\.toml|\\.golangci\\.toml)$"]},{"title":"hydra.yml","description":"ORY Hydra configuration file","updated":"2020-12-28T20:24:44+00:00","url":"https://raw.githubusercontent.com/ory/hydra/v1.8.5/.schema/version.schema.json","urlHash":"72270a03bb76bce99a1a7b8de8b3eec369f52db4589cf2ffc63334caa4f44ec0","authors":["automatically included from https://schemastore.org"],"patterns":["^(.*(/|\\)hydra\\.toml|hydra\\.toml)$"]},{"title":"keto.yml","description":"ORY Keto configuration file","updated":"2020-12-28T20:24:44+00:00","url":"https://raw.githubusercontent.com/ory/keto/master/.schema/config.schema.json","urlHash":"9a60cb3b9a1bb949fe7bd5be3f290958dfc6e322861e14af5e4cd3173a73867d","authors":["automatically included from https://schemastore.org"],"patterns":["^(.*(/|\\)keto\\.toml|keto\\.toml)$"]},{"title":"oathkeeper.yml","description":"ORY Oathkeeper configuration file","updated":"2020-12-28T20:24:44+00:00","url":"https://raw.githubusercontent.com/ory/oathkeeper/master/.schemas/config.schema.json","urlHash":"88077971a2d3bace7824a1a36b2b2bd3281f10fd85a1f20c28787556f79fd7de","authors":["automatically included from https://schemastore.org"],"patterns":["^(.*(/|\\)oathkeeper\\.toml|oathkeeper\\.toml)$"]},{"title":"HEMTT","description":"HEMTT Project File","updated":"2020-12-28T20:24:44+00:00","url":"https://json.schemastore.org/hemtt-0.6.2","urlHash":"3d774fddb58c322fe4ad21f8ed0a69caff09d23faa3b8f581532bd324b64ccae","authors":["automatically included from https://schemastore.org"],"patterns":["^(.*(/|\\)hemtt\\.toml|hemtt\\.toml)$"]}]} | davidpdrsn/taplo |
<|start_filename|>MoneyDataType/LegacyAmountConverter.cs<|end_filename|>
using System;
using Money.Abstractions;
namespace Money.Serialization
{
internal class LegacyAmountConverter : Newtonsoft.Json.JsonConverter<Amount>
{
private const string ValueName = "value";
private const string CurrencyName = "currency";
private const string Name = "name";
private const string NativeName = "nativename";
private const string EnglishName = "englishname";
private const string Symbol = "symbol";
private const string Iso = "iso";
private const string Dec = "dec";
public override Amount ReadJson(Newtonsoft.Json.JsonReader reader, Type objectType, Amount existingValue, bool hasExistingValue, Newtonsoft.Json.JsonSerializer serializer)
{
var val = default(decimal);
ICurrency currency = null;
string nativename = null;
string englishname = null;
string symbol = null;
string iso = null;
int? dec = null;
while (reader.Read())
{
if (reader.TokenType != Newtonsoft.Json.JsonToken.PropertyName) break;
var propertyName = reader.Value;
switch (propertyName)
{
case ValueName:
val = (decimal)reader.ReadAsDecimal();
break;
case CurrencyName:
currency = Currency.FromISOCode(reader.ReadAsString());
break;
// Kept for backwards compatibility
case Name:
nativename = reader.ReadAsString();
break;
case NativeName:
nativename = reader.ReadAsString();
break;
case EnglishName:
englishname = reader.ReadAsString();
break;
case Symbol:
symbol = reader.ReadAsString();
break;
case Iso:
iso = reader.ReadAsString();
break;
case Dec:
dec = reader.ReadAsInt32();
break;
}
}
if (!Currency.IsKnownCurrency(currency?.CurrencyIsoCode ?? ""))
currency = new Currency(nativename, englishname, symbol, iso, dec.GetValueOrDefault(2));
if (currency is null)
throw new InvalidOperationException("Invalid amount format. Must include a currency");
return new Amount(val, currency);
}
public override void WriteJson(Newtonsoft.Json.JsonWriter writer, Amount amount, Newtonsoft.Json.JsonSerializer serializer)
{
if (amount.Currency is null)
throw new InvalidOperationException("Amount must have a currency applied to allow serialization");
writer.WriteStartObject();
writer.WritePropertyName(ValueName);
writer.WriteValue(amount.Value);
if (Currency.IsKnownCurrency(amount.Currency.CurrencyIsoCode))
{
writer.WritePropertyName(CurrencyName);
writer.WriteValue(amount.Currency.CurrencyIsoCode);
}
else
{
writer.WritePropertyName(NativeName);
writer.WriteValue(amount.Currency.NativeName);
writer.WritePropertyName(EnglishName);
writer.WriteValue(amount.Currency.EnglishName);
writer.WritePropertyName(Symbol);
writer.WriteValue(amount.Currency.Symbol);
writer.WritePropertyName(Iso);
writer.WriteValue(amount.Currency.CurrencyIsoCode);
if (amount.Currency.DecimalPlaces != 2)
{
writer.WritePropertyName(Dec);
writer.WriteValue(amount.Currency.DecimalPlaces);
}
}
writer.WriteEndObject();
}
}
}
<|start_filename|>OrchardCore.Commerce/Drivers/OrderPartDisplayDriver.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.Commerce.ViewModels;
using OrchardCore.ContentManagement;
using OrchardCore.ContentManagement.Display.ContentDisplay;
using OrchardCore.ContentManagement.Display.Models;
using OrchardCore.DisplayManagement.ModelBinding;
using OrchardCore.DisplayManagement.Views;
namespace OrchardCore.Commerce.Drivers
{
public class OrderPartDisplayDriver : ContentPartDisplayDriver<OrderPart>
{
private readonly IProductService _productService;
private readonly IContentManager _contentManager;
public OrderPartDisplayDriver(
IProductService productService,
IContentManager contentManager)
{
_productService = productService;
_contentManager = contentManager;
}
public override IDisplayResult Display(OrderPart orderPart, BuildPartDisplayContext context)
// TODO: add permissions
=> Initialize<OrderPartViewModel>(GetDisplayShapeType(context), m => BuildViewModel(m, orderPart))
.Location("Detail", "Content:25")
.Location("Summary", "Meta:10");
public override IDisplayResult Edit(OrderPart orderPart, BuildPartEditorContext context)
=> Initialize<OrderPartViewModel>(GetEditorShapeType(context), m => BuildViewModel(m, orderPart));
public override async Task<IDisplayResult> UpdateAsync(OrderPart orderPart, IUpdateModel updater, UpdatePartEditorContext context)
{
await updater.TryUpdateModelAsync(orderPart, Prefix, t => t.LineItems);
return Edit(orderPart, context);
}
private Task BuildViewModel(OrderPartViewModel model, OrderPart part)
=> Task.Run(async () =>
{
model.ContentItem = part.ContentItem;
IDictionary<string, ProductPart> products =
await _productService.GetProductDictionary(part.LineItems.Select(line => line.ProductSku));
OrderLineItemViewModel[] lineItems = await Task.WhenAll(part.LineItems.Select(async lineItem =>
{
ProductPart product = products[lineItem.ProductSku];
ContentItemMetadata metaData = await _contentManager.GetContentItemMetadataAsync(product);
return new OrderLineItemViewModel
{
Quantity = lineItem.Quantity,
ProductSku = lineItem.ProductSku,
ProductName = product.ContentItem.DisplayText,
UnitPrice = lineItem.UnitPrice,
LinePrice = lineItem.LinePrice,
ProductRouteValues = metaData.DisplayRouteValues,
Attributes = lineItem.Attributes.ToDictionary(attr => attr.Key, attr => attr.Value)
};
}));
model.LineItems = lineItems;
model.OrderPart = part;
});
}
}
<|start_filename|>OrchardCore.Commerce/Services/ProductPartContentAliasProvider.cs<|end_filename|>
using System.Threading.Tasks;
using OrchardCore.Commerce.Indexes;
using OrchardCore.ContentManagement;
using YesSql;
namespace OrchardCore.Commerce.Services
{
/// <summary>
/// Handles looking up products by SKU.
/// </summary>
public class ProductPartContentAliasProvider : IContentHandleProvider
{
private readonly ISession _session;
public ProductPartContentAliasProvider(ISession session)
{
_session = session;
}
public int Order => 57;
public async Task<string> GetContentItemIdAsync(string alias)
{
if (alias.StartsWith("sku:", System.StringComparison.OrdinalIgnoreCase))
{
var sku = alias.Substring(4).ToLowerInvariant();
var productPartIndex = await _session
.Query<ContentItem, ProductPartIndex>(x => x.Sku == sku)
.FirstOrDefaultAsync();
return productPartIndex?.ContentItemId;
}
return null;
}
}
}
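// Usage sketch (illustrative, not part of the original source): this provider is
// consumed through OrchardCore's content handle resolution. Assuming an injected
// IContentHandleManager named `handleManager` (the exact manager interface may
// vary across OrchardCore versions):
//
//     string contentItemId = await handleManager.GetContentItemIdAsync("sku:my-product-sku");
//     // contentItemId is null when no product carries that SKU.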
<|start_filename|>MoneyDataType/AmountConverter.cs<|end_filename|>
using System;
using System.Text.Json;
using System.Text.Json.Serialization;
using Money.Abstractions;
namespace Money
{
internal class AmountConverter : JsonConverter<Amount>
{
private const string ValueName = "value";
private const string CurrencyName = "currency";
private const string Name = "name";
private const string NativeName = "nativename";
private const string EnglishName = "englishname";
private const string Symbol = "symbol";
private const string Iso = "iso";
private const string Dec = "dec";
public override Amount Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
var val = default(decimal);
ICurrency currency = null;
string nativename = null;
string englishname = null;
string symbol = null;
string iso = null;
int? dec = null;
while (reader.Read())
{
if (reader.TokenType != JsonTokenType.PropertyName) break;
var propertyName = reader.GetString();
if (!reader.Read()) continue;
switch (propertyName)
{
case ValueName:
val = reader.GetDecimal();
break;
case CurrencyName:
currency = Currency.FromISOCode(reader.GetString());
break;
// Kept for backwards compatibility
case Name:
nativename = reader.GetString();
break;
case NativeName:
nativename = reader.GetString();
break;
case EnglishName:
englishname = reader.GetString();
break;
case Symbol:
symbol = reader.GetString();
break;
case Iso:
iso = reader.GetString();
break;
case Dec:
if (reader.TryGetInt32(out var i)) dec = i;
break;
}
}
if (!Currency.IsKnownCurrency(currency?.CurrencyIsoCode ?? ""))
currency = new Currency(nativename, englishname, symbol, iso, dec.GetValueOrDefault(2));
if (currency is null)
throw new InvalidOperationException("Invalid amount format. Must include a currency");
return new Amount(val, currency);
}
public override void Write(Utf8JsonWriter writer, Amount amount, JsonSerializerOptions options)
{
if (amount.Currency is null)
throw new InvalidOperationException("Amount must have a currency applied to allow serialization");
writer.WriteStartObject();
writer.WriteNumber(ValueName, amount.Value);
if (Currency.IsKnownCurrency(amount.Currency.CurrencyIsoCode))
{
writer.WriteString(CurrencyName, amount.Currency.CurrencyIsoCode);
}
else
{
writer.WriteString(NativeName, amount.Currency.NativeName);
writer.WriteString(EnglishName, amount.Currency.EnglishName);
writer.WriteString(Symbol, amount.Currency.Symbol);
writer.WriteString(Iso, amount.Currency.CurrencyIsoCode);
if (amount.Currency.DecimalPlaces != 2)
writer.WriteNumber(Dec, amount.Currency.DecimalPlaces);
}
writer.WriteEndObject();
}
}
}
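// Usage sketch (illustrative, not part of the original source): since this converter
// targets System.Text.Json, it can be registered on serializer options, e.g.:
//
//     var options = new System.Text.Json.JsonSerializerOptions();
//     options.Converters.Add(new AmountConverter());
//     string json = System.Text.Json.JsonSerializer.Serialize(
//         new Amount(42m, Currency.FromISOCode("USD")), options);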
<|start_filename|>OrchardCore.Commerce/Abstractions/IShoppingCartHelpers.cs<|end_filename|>
using System.Collections.Generic;
using System.Threading.Tasks;
using OrchardCore.Commerce.Models;
using OrchardCore.Commerce.ViewModels;
using OrchardCore.ContentManagement.Metadata.Models;
namespace OrchardCore.Commerce.Abstractions
{
public interface IShoppingCartHelpers
{
ShoppingCartLineViewModel GetExistingLine(ShoppingCartViewModel cart, ShoppingCartLineViewModel line);
bool IsSameProductAs(ShoppingCartLineViewModel line, ShoppingCartLineViewModel other);
Task<ShoppingCart> ParseCart(ShoppingCartUpdateModel cart);
Task<ShoppingCartItem> ParseCartLine(ShoppingCartLineUpdateModel line);
HashSet<IProductAttributeValue> ParseAttributes(ShoppingCartLineUpdateModel line, ContentTypeDefinition type);
Task<ShoppingCart> Deserialize(string serializedCart);
Task<string> Serialize(ShoppingCart cart);
}
}
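// Usage sketch (illustrative, not part of the original source): implementations are
// expected to round-trip a cart through its serialized form. Assuming an injected
// IShoppingCartHelpers named `helpers`:
//
//     string json = await helpers.Serialize(cart);
//     ShoppingCart roundTripped = await helpers.Deserialize(json);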
<|start_filename|>OrchardCore.Commerce/ViewModels/ShoppingCartViewModel.cs<|end_filename|>
using System.Collections.Generic;
using Money;
namespace OrchardCore.Commerce.ViewModels
{
public class ShoppingCartViewModel
{
public IList<ShoppingCartLineViewModel> Lines { get; set; }
public string Id { get; set; }
public IEnumerable<Amount> Totals { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/ViewModels/OrderLineItemViewModel.cs<|end_filename|>
using System.Collections.Generic;
using Microsoft.AspNetCore.Routing;
using Money;
using OrchardCore.Commerce.Abstractions;
namespace OrchardCore.Commerce.ViewModels
{
public class OrderLineItemViewModel
{
public int Quantity { get; set; }
public string ProductSku { get; set; }
public string ProductName { get; set; }
public RouteValueDictionary ProductRouteValues { get; set; }
public string ProductImageUrl { get; set; }
public Amount UnitPrice { get; set; }
public Amount LinePrice { get; set; }
public IDictionary<string, IProductAttributeValue> Attributes { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Drivers/ProductAttributeFieldDriver.cs<|end_filename|>
using Microsoft.Extensions.Localization;
using OrchardCore.Commerce.Fields;
using OrchardCore.Commerce.Settings;
using OrchardCore.Commerce.ViewModels;
using OrchardCore.ContentManagement.Display.ContentDisplay;
using OrchardCore.ContentManagement.Display.Models;
using OrchardCore.DisplayManagement.Views;
namespace OrchardCore.Commerce.Drivers
{
public abstract class ProductAttributeFieldDriver<TField, TFieldSettings> : ContentFieldDisplayDriver<TField>
where TField : ProductAttributeField, new()
where TFieldSettings : ProductAttributeFieldSettings, new()
{
public ProductAttributeFieldDriver(
IStringLocalizer<ProductAttributeFieldDriver<TField, TFieldSettings>> localizer)
{
T = localizer;
}
public IStringLocalizer T { get; set; }
public override IDisplayResult Edit(TField field, BuildFieldEditorContext context)
{
return Initialize<EditProductAttributeFieldViewModel<TField, TFieldSettings>>(
GetEditorShapeType(context), model =>
{
var settings = new TFieldSettings();
context.PartFieldDefinition.PopulateSettings(settings);
model.Field = field;
model.Settings = settings;
model.Part = context.ContentPart;
model.PartFieldDefinition = context.PartFieldDefinition;
});
}
}
public class BooleanProductAttributeFieldDriver
: ProductAttributeFieldDriver<BooleanProductAttributeField, BooleanProductAttributeFieldSettings>
{
public BooleanProductAttributeFieldDriver(
IStringLocalizer<ProductAttributeFieldDriver<BooleanProductAttributeField, BooleanProductAttributeFieldSettings>> localizer)
: base(localizer) { }
}
public class NumericProductAttributeFieldDriver
: ProductAttributeFieldDriver<NumericProductAttributeField, NumericProductAttributeFieldSettings>
{
public NumericProductAttributeFieldDriver(
IStringLocalizer<ProductAttributeFieldDriver<NumericProductAttributeField, NumericProductAttributeFieldSettings>> localizer)
: base(localizer) { }
}
public class TextProductAttributeFieldDriver
: ProductAttributeFieldDriver<TextProductAttributeField, TextProductAttributeFieldSettings>
{
public TextProductAttributeFieldDriver(
IStringLocalizer<ProductAttributeFieldDriver<TextProductAttributeField, TextProductAttributeFieldSettings>> localizer)
: base(localizer) { }
}
}
<|start_filename|>OrchardCore.Commerce/Models/ShoppingCartItem.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json.Serialization;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Serialization;
namespace OrchardCore.Commerce.Models
{
/// <summary>
/// A shopping cart item
/// </summary>
[JsonConverter(typeof(ShoppingCartItemConverter))]
public sealed class ShoppingCartItem : IEquatable<ShoppingCartItem>
{
/// <summary>
/// Constructs a new shopping cart item
/// </summary>
/// <param name="quantity">The number of products</param>
/// <param name="productSku">The product SKU</param>
/// <param name="attributes">The product attribute values, if any</param>
/// <param name="prices">The prioritized prices, if any</param>
public ShoppingCartItem(
int quantity,
string productSku,
IEnumerable<IProductAttributeValue> attributes = null,
IEnumerable<PrioritizedPrice> prices = null)
{
if (quantity < 0) throw new ArgumentOutOfRangeException(nameof(quantity));
Quantity = quantity;
ProductSku = productSku ?? throw new ArgumentNullException(nameof(productSku));
Attributes = attributes is null
? new HashSet<IProductAttributeValue>()
: new HashSet<IProductAttributeValue>(attributes);
Prices = prices is null
? new List<PrioritizedPrice>().AsReadOnly()
: new List<PrioritizedPrice>(prices).AsReadOnly();
}
/// <summary>
/// The number of products
/// </summary>
public int Quantity { get; }
/// <summary>
/// The product SKU
/// </summary>
public string ProductSku { get; }
/// <summary>
/// The product attributes associated with this shopping cart line item
/// </summary>
public ISet<IProductAttributeValue> Attributes { get; }
/// <summary>
/// The available prices
/// </summary>
public IReadOnlyList<PrioritizedPrice> Prices { get; }
/// <summary>
/// Creates a new shopping cart item that is a clone of this, but with prices replaced with new ones.
/// </summary>
/// <param name="prices">The list of prices to add.</param>
/// <returns>The new shopping cart item.</returns>
public ShoppingCartItem WithPrices(IEnumerable<PrioritizedPrice> prices)
=> new ShoppingCartItem(Quantity, ProductSku, Attributes, prices);
/// <summary>
/// Creates a new shopping cart item that is a clone of this, but with an additional price.
/// </summary>
/// <param name="price">The price to add.</param>
/// <returns>The new shopping cart item.</returns>
public ShoppingCartItem WithPrice(PrioritizedPrice price)
=> new ShoppingCartItem(Quantity, ProductSku, Attributes, Prices.Concat(new[] { price }));
/// <summary>
/// Creates a new shopping cart item that is a clone of this, but with a different quantity.
/// </summary>
/// <param name="quantity">The new quantity.</param>
/// <returns>The new shopping cart item.</returns>
public ShoppingCartItem WithQuantity(int quantity)
=> new ShoppingCartItem(quantity, ProductSku, Attributes, Prices);
public override bool Equals(object obj)
=> !ReferenceEquals(null, obj)
&& (ReferenceEquals(this, obj) || Equals(obj as ShoppingCartItem));
/// <summary>
/// A string representation of the shopping cart item
/// </summary>
/// <returns>A string of the form "quantity x SKU (attributes)".</returns>
public override string ToString()
=> Quantity + " x " + ProductSku
+ (Attributes.Count != 0 ? " (" + string.Join(", ", Attributes) + ")" : "");
public bool Equals(ShoppingCartItem other)
=> other is null ? false : other.Quantity == Quantity && other.IsSameProductAs(this);
public bool IsSameProductAs(ShoppingCartItem other)
=> ProductSku == other.ProductSku && Attributes.SetEquals(other.Attributes);
public override int GetHashCode() => (ProductSku, Quantity, Attributes).GetHashCode();
}
}
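// Usage sketch (illustrative, not part of the original source): items are immutable,
// so updates go through the With* methods. "TSHIRT-M" is a hypothetical SKU:
//
//     var item = new ShoppingCartItem(1, "TSHIRT-M");
//     var three = item.WithQuantity(3);        // new instance with quantity 3
//     bool same = item.IsSameProductAs(three); // true: same SKU and attributes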
<|start_filename|>OrchardCore.Commerce/Views/Items/ProductAddedToCartEvent.Fields.Design.cshtml<|end_filename|>
@model OrchardCore.Workflows.ViewModels.ActivityViewModel<ProductAddedToCartEvent>
@using OrchardCore.Workflows.Helpers
@using OrchardCore.Commerce.Activities
<header>
<h4><i class="fa fa-shopping-cart"></i>@Model.Activity.GetTitleOrDefault(() => T["Product added to cart"])</h4>
</header>
<|start_filename|>OrchardCore.Commerce/Abstractions/IPredefinedValuesProductAttributeValue.cs<|end_filename|>
namespace OrchardCore.Commerce.Abstractions
{
public interface IPredefinedValuesProductAttributeValue : IProductAttributeValue
{
public object UntypedPredefinedValue { get; }
}
public interface IPredefinedValuesProductAttributeValue<T> : IPredefinedValuesProductAttributeValue
{
public T PredefinedValue { get; }
}
}
<|start_filename|>OrchardCore.Commerce/Settings/ProductAttributeFieldSettings.cs<|end_filename|>
using System.Collections.Generic;
using OrchardCore.Commerce.Abstractions;
namespace OrchardCore.Commerce.Settings
{
/// <summary>
/// A base class for product attribute settings
/// </summary>
public abstract class ProductAttributeFieldSettings
{
/// <summary>
/// The description text to display for this attribute in the product page
/// </summary>
public string Hint { get; set; }
}
/// <summary>
/// A typed base class for product attribute settings
/// </summary>
/// <typeparam name="T"></typeparam>
public abstract class ProductAttributeFieldSettings<T> : ProductAttributeFieldSettings
{
/// <summary>
/// The default value associated with this attribute
/// </summary>
public T DefaultValue { get; set; }
}
/// <summary>
/// Settings for the Boolean product attribute
/// </summary>
public class BooleanProductAttributeFieldSettings : ProductAttributeFieldSettings<bool>
{
/// <summary>
/// The text associated to the checkbox for this attribute in the product page
/// </summary>
public string Label { get; set; }
}
/// <summary>
/// Settings for the numeric product attribute
/// </summary>
public class NumericProductAttributeFieldSettings : ProductAttributeFieldSettings<decimal?>
{
/// <summary>
/// Whether a value is required
/// </summary>
public bool Required { get; set; }
/// <summary>
/// A hint to display when the input is empty
/// </summary>
public string Placeholder { get; set; }
/// <summary>
/// The number of digits after the decimal point
/// </summary>
public int DecimalPlaces { get; set; }
/// <summary>
/// The minimum value allowed
/// </summary>
public decimal? Minimum { get; set; }
/// <summary>
/// The maximum value allowed
/// </summary>
public decimal? Maximum { get; set; }
}
/// <summary>
/// Settings for the text product attribute
/// </summary>
public class TextProductAttributeFieldSettings : ProductAttributeFieldSettings<string>, IPredefinedValuesProductAttributeFieldSettings
{
/// <summary>
/// Whether a value is required
/// </summary>
public bool Required { get; set; }
/// <summary>
/// A hint to display when the input is empty
/// </summary>
public string Placeholder { get; set; }
/// <summary>
/// The set of suggested or allowed values
/// </summary>
public IEnumerable<object> PredefinedValues { get; set; }
/// <summary>
/// Whether values should be restricted to the set of predefined values
/// </summary>
public bool RestrictToPredefinedValues { get; set; }
/// <summary>
/// Whether multiple values can be selected
/// </summary>
public bool MultipleValues { get; set; }
}
}
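// Configuration sketch (illustrative, not part of the original source): a text
// attribute restricted to a fixed set of sizes could be configured like this:
//
//     var sizeSettings = new TextProductAttributeFieldSettings
//     {
//         Required = true,
//         PredefinedValues = new object[] { "S", "M", "L" },
//         RestrictToPredefinedValues = true,
//         DefaultValue = "M"
//     };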
<|start_filename|>AddressDataType/AddressFormatterProvider.cs<|end_filename|>
namespace InternationalAddress
{
public class AddressFormatterProvider : IAddressFormatterProvider
{
public string Format(Address address)
{
string region = address?.Region;
if (region != null
&& Regions.RegionCodes.TryGetValue(region, out string regionCode)
&& KnownAddressFormatters.Formatters.TryGetValue(regionCode, out var formatter))
{
return formatter.Format(address);
}
return KnownAddressFormatters.DefaultFormatter.Format(address);
}
}
}
<|start_filename|>AddressDataType/AddressFormatter.cs<|end_filename|>
using System;
namespace InternationalAddress
{
/// <summary>
/// A flexible address formatter that can be built with a couple format strings.
/// </summary>
public class AddressFormatter : IAddressFormatter
{
private readonly string _addressFormat;
private readonly string _cityLineFormat;
private readonly bool _uppercase;
/// <summary>
/// Constructs a specialized address formatter from a couple of format strings.
/// </summary>
/// <param name="addressFormat">
/// A format string for the address. Parameters are, in order:
/// 0. The name
/// 1. The department
/// 2. The company or institution
/// 3. The first line of the street address
/// 4. The second line of the street address
/// 5. The city line (<paramref name="cityLineFormat"/>)
/// 6. The country
/// </param>
/// <param name="cityLineFormat">
/// A format string for the city line of the address. Parameters are, in order:
/// 0. The city
/// 1. The province or state
/// 2. The postal code
/// </param>
/// <param name="uppercase">If true, the address is changed to uppercase after formatting.</param>
public AddressFormatter(
string addressFormat = @"{0}
{1}
{2}
{3}
{4}
{5}
{6}",
string cityLineFormat = "{0} {1} {2}",
bool uppercase = true)
{
_addressFormat = addressFormat;
_cityLineFormat = cityLineFormat;
_uppercase = uppercase;
}
/// <summary>
/// Formats the address with the format strings provided
/// </summary>
/// <param name="address">The address to format.</param>
/// <returns>The formatted address, or "-" when the address is null.</returns>
public string Format(Address address)
{
if (address is null) return "-";
string rawFormatted = String.Format(
_addressFormat,
address.Name,
address.Department,
address.Company,
address.StreetAddress1,
address.StreetAddress2,
String.Format(_cityLineFormat, address.City, address.Province, address.PostalCode),
address.Region);
string withoutEmptyLines = String.Join(Environment.NewLine, rawFormatted.Split(Environment.NewLine, StringSplitOptions.RemoveEmptyEntries));
return _uppercase ? withoutEmptyLines.ToUpper() : withoutEmptyLines;
}
}
}
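// Usage sketch (illustrative, not part of the original source): a formatter with a
// "city, province postal-code" city line, assuming an Address instance with City,
// Province and PostalCode populated:
//
//     var formatter = new AddressFormatter(cityLineFormat: "{0}, {1} {2}", uppercase: false);
//     Console.WriteLine(formatter.Format(address));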
<|start_filename|>OrchardCore.Commerce/Models/OrderAdditionalCost.cs<|end_filename|>
using Money;
namespace OrchardCore.Commerce.Models
{
public class OrderAdditionalCost
{
/// <summary>
/// A string describing the kind of additional cost this covers, such as "shipping" or "taxes".
/// </summary>
public string Kind { get; set; }
/// <summary>
/// The description for that additional cost as it will appear in the order or invoice.
/// </summary>
public string Description { get; set; }
/// <summary>
/// The amount that will be charged on top of the cost of items in the order.
/// </summary>
public Amount Cost { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/ProductAttributeValues/TextProductAttributeValue.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using OrchardCore.Commerce.Abstractions;
namespace OrchardCore.Commerce.ProductAttributeValues
{
public class TextProductAttributeValue : BaseProductAttributeValue<IEnumerable<string>>, IPredefinedValuesProductAttributeValue<string>
{
public TextProductAttributeValue(string attributeName, IEnumerable<string> value)
: base(attributeName, value) { }
public TextProductAttributeValue(string attributeName, params string[] values)
: this(attributeName, (IEnumerable<string>)values) { }
public override string Display(CultureInfo culture = null)
=> Value is null || !Value.Any() || Value.First() is null ? "" : FieldName + ": " + String.Join(", ", Value);
public override bool Equals(IProductAttributeValue<IEnumerable<string>> other)
=> other == null || other.Value == null || !other.Value.Any() ? Value == null || !Value.Any()
: Value == null || !Value.Any() || AttributeName != other.AttributeName ? false
: new HashSet<string>(Value).SetEquals(other.Value);
public override int GetHashCode()
=> Value is null ? 1.GetHashCode() : Value.Aggregate(1.GetHashCode(), (code, val) => (code, val).GetHashCode());
public override string ToString() => AttributeName + ": " + String.Join(", ", Value);
public object UntypedPredefinedValue => PredefinedValue;
public string PredefinedValue => Value?.FirstOrDefault();
}
}
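// Equality sketch (illustrative, not part of the original source): values compare
// as sets, so the order of the individual values does not matter:
//
//     var a = new TextProductAttributeValue("Product.Size", "S", "M");
//     var b = new TextProductAttributeValue("Product.Size", "M", "S");
//     bool equal = a.Equals(b); // true: same attribute name, same set of values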
<|start_filename|>OrchardCore.Commerce/Views/BooleanProductAttributeField_Edit.cshtml<|end_filename|>
@model OrchardCore.Commerce.ViewModels.EditProductAttributeFieldViewModel<BooleanProductAttributeField, BooleanProductAttributeFieldSettings>
@using OrchardCore.ContentManagement.Metadata.Models
@using OrchardCore.Commerce.Fields
@using OrchardCore.Commerce.Settings
@{
var settings = Model.Settings;
string name = Model.PartFieldDefinition.DisplayName();
}
<fieldset class="form-group">
<label>@name @T["(Boolean product attribute)"]</label>
<ul>
<li class="hint">@T["Label: {0}", settings.Label]</li>
<li class="hint">@(settings.DefaultValue ? T["Default value: on"] : T["Default value: off"])</li>
<li class="hint">@T["Hint: {0}", settings.Hint]</li>
</ul>
</fieldset>
<|start_filename|>OrchardCore.Commerce/Views/NumericProductAttributeValue.cshtml<|end_filename|>
@using OrchardCore.Commerce.ProductAttributeValues
@{
TagBuilder tag = Tag(Model, "li");
tag.AddCssClass("list-inline-item");
var attr = Model.Attribute as NumericProductAttributeValue;
var label = Model.Label as string;
tag.InnerHtml.AppendHtml(T["{0}: {1}", label, attr.Value]);
@tag
}
<|start_filename|>OrchardCore.Commerce/Views/TextProductAttributeField.cshtml<|end_filename|>
@using OrchardCore.Commerce.Settings
@using OrchardCore.Commerce.Abstractions
@{
int index = Model.Index;
var attr = Model.AttributeDescription as ProductAttributeDescription;
var settings = attr.Settings as TextProductAttributeFieldSettings;
var id = $"{Model.IdPrefix}-attribute-{index}";
int optionIndex = 0;
}
<label for="@id">@attr.Name</label>
<input type="hidden" name="line.Attributes[@index].Key" value="@attr.PartName.@attr.Name" />
@if (settings.PredefinedValues is null || settings.PredefinedValues.Count() == 0)
{
<input id="@id" type="text" name="line.Attributes[@index].Value" value="@settings.DefaultValue" placeholder="@settings.Placeholder" class="form-control" />
}
else if (settings.MultipleValues)
{
<ul>
@foreach (string predefinedValue in settings.PredefinedValues)
{
var optionId = $"{id}-{optionIndex++}";
<li>
<input id="optionId" type="checkbox" name="line.Attributes[@index].Value" checked="@(predefinedValue == settings.DefaultValue)" class="form-check-input" />
<label for="@optionId" class="form-check-label">@predefinedValue</label>
</li>
}
</ul>
}
else if (settings.RestrictToPredefinedValues)
{
if (settings.PredefinedValues.Count() > 5) // Use a drop-down for more than 5 options, radio buttons otherwise
{
<select id="@id" name="line.Attributes[@index].Value" class="form-control">
@foreach (string predefinedValue in settings.PredefinedValues)
{
<option selected="@(predefinedValue == settings.DefaultValue)">@predefinedValue</option>
}
</select>
}
else
{
<ul>
@foreach (string predefinedValue in settings.PredefinedValues)
{
var optionId = $"{id}-{optionIndex++}";
<li>
<input id="@optionId" type="radio" name="line.Attributes[@index].Value" value="@predefinedValue" checked="@(predefinedValue == settings.DefaultValue)" class="form-check-input" />
<label for="@optionId" class="form-check-label">@predefinedValue</label>
</li>
}
</ul>
}
}
else
{
<input id="@id" type="text" name="line.Attributes[@index].Value" value="@settings.DefaultValue" placeholder="@settings.Placeholder" class="form-control" />
<select data-related="line.Attributes[@index].Value" class="product-attribute-combo-select form-control">
@foreach (string predefinedValue in settings.PredefinedValues)
{
<option selected="@(predefinedValue == settings.DefaultValue)">@predefinedValue</option>
}
</select>
<script at="Foot">
$(function () {
$(".product-attribute-combo-select").on('change', function (e) {
var select = $(e.target);
select
.closest('form')
.find('input[name="' + select.data('related') + '"]')
.val(select.val());
});
});
</script>
}
@if (settings.Hint != null)
{
<div class="hint form-text text-muted">@settings.Hint</div>
}
<|start_filename|>OrchardCore.Commerce/Views/Items/ProductAddedToCartEvent.Fields.Thumbnail.cshtml<|end_filename|>
<h4 class="card-title"><i class="fa fa-shopping-cart"></i>@T["Product added"]</h4>
<p>@T["Executes when a product is added to the shopping cart."]</p>
<|start_filename|>OrchardCore.Commerce/ViewModels/ShoppingCartUpdateModel.cs<|end_filename|>
using System.Collections.Generic;
namespace OrchardCore.Commerce.ViewModels
{
public class ShoppingCartUpdateModel
{
public IList<ShoppingCartLineUpdateModel> Lines { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/ViewModels/OrderPartViewModel.cs<|end_filename|>
using System.Collections.Generic;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Money;
using OrchardCore.Commerce.Models;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.ViewModels
{
public class OrderPartViewModel
{
public IList<OrderLineItemViewModel> LineItems { get; set; }
public Amount Total { get; set; }
[BindNever]
public ContentItem ContentItem { get; set; }
[BindNever]
public OrderPart OrderPart { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Services/SimplePriceStrategy.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using Money;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Services
{
/// <summary>
/// A price selection strategy that selects the lowest price of the highest priority prices.
///
/// This price selection strategy will fail if the list of amounts
/// isn't homogeneous in currency, so calling code is responsible for filtering
/// for a specific currency before calling.
/// </summary>
public class SimplePriceStrategy : IPriceSelectionStrategy
{
public Amount SelectPrice(IEnumerable<PrioritizedPrice> prices)
=> prices is null
|| !prices.Any()
? new Amount()
: prices
.Where(pp => pp.Priority == prices.Max(pp => pp.Priority))
.Min(pp => pp.Price);
}
}
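// Usage sketch (illustrative, not part of the original source): given prices that
// all share one currency, the strategy keeps only the highest-priority entries and
// returns the cheapest of those, e.g. for a ShoppingCartItem's Prices list:
//
//     var strategy = new SimplePriceStrategy();
//     Amount selected = strategy.SelectPrice(item.Prices);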
<|start_filename|>OrchardCore.Commerce/Views/AddressFieldSettings.Edit.cshtml<|end_filename|>
@model OrchardCore.Commerce.Settings.AddressPartFieldSettings
<div class="form-group">
<div class="row col-md">
<label asp-for="Hint">@T["Hint"]</label>
<textarea asp-for="Hint" rows="2" class="form-control"></textarea>
<span class="hint">@T["The hint text to display for this attribute in the product page."]</span>
</div>
</div>
<|start_filename|>OrchardCore.Commerce/Views/BooleanProductAttributeValue.cshtml<|end_filename|>
@using OrchardCore.Commerce.ProductAttributeValues
@{
var attr = Model.Attribute as BooleanProductAttributeValue;
if (attr.Value)
{
TagBuilder tag = Tag(Model, "li");
tag.AddCssClass("list-inline-item");
var label = Model.Label as string;
tag.InnerHtml.Append(label);
@tag
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/IPredefinedValuesProductAttributeFieldSettings.cs<|end_filename|>
using System.Collections.Generic;
namespace OrchardCore.Commerce.Abstractions
{
public interface IPredefinedValuesProductAttributeFieldSettings
{
/// <summary>
/// Whether values should be restricted to the set of predefined values
/// </summary>
public bool RestrictToPredefinedValues { get; }
/// <summary>
/// The set of suggested or allowed values
/// </summary>
public IEnumerable<object> PredefinedValues { get; }
}
}
<|start_filename|>OrchardCore.Commerce/Services/ProductAttributeProvider.cs<|end_filename|>
using System;
using System.Linq;
using System.Text.Json;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Fields;
using OrchardCore.Commerce.ProductAttributeValues;
using OrchardCore.ContentManagement.Metadata.Models;
namespace OrchardCore.Commerce.Services
{
public class ProductAttributeProvider : IProductAttributeProvider
{
public IProductAttributeValue CreateFromJsonElement(
ContentTypePartDefinition partDefinition,
ContentPartFieldDefinition attributeFieldDefinition,
JsonElement value)
{
string attributeFieldTypeName = attributeFieldDefinition.FieldDefinition.Name;
string name = partDefinition.Name + "." + attributeFieldDefinition.Name;
switch (attributeFieldTypeName)
{
case nameof(BooleanProductAttributeField):
return new BooleanProductAttributeValue(name, value.GetBoolean());
case nameof(NumericProductAttributeField):
if (value.TryGetDecimal(out decimal decimalValue))
{
return new NumericProductAttributeValue(name, decimalValue);
}
return new NumericProductAttributeValue(name, null);
case nameof(TextProductAttributeField):
switch (value.ValueKind)
{
case JsonValueKind.String:
return new TextProductAttributeValue(name, value.GetString());
case JsonValueKind.Array:
return new TextProductAttributeValue(name, value.EnumerateArray().Select(el => el.GetString()));
default:
return new TextProductAttributeValue(name, null);
}
default:
return null;
}
}
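// Illustrative element-to-value mappings produced by CreateFromJsonElement
// (the "Part.Field" names are examples):
//   BooleanProductAttributeField : true             -> BooleanProductAttributeValue("Part.Field", true)
//   NumericProductAttributeField : 42.5             -> NumericProductAttributeValue("Part.Field", 42.5m)
//   TextProductAttributeField    : "S" or ["S","M"] -> TextProductAttributeValue("Part.Field", ...)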
public IProductAttributeValue Parse(
ContentTypePartDefinition partDefinition,
ContentPartFieldDefinition attributeFieldDefinition,
string[] value)
{
string attributeFieldTypeName = attributeFieldDefinition.FieldDefinition.Name;
string name = partDefinition.Name + "." + attributeFieldDefinition.Name;
switch(attributeFieldTypeName)
{
case nameof(BooleanProductAttributeField):
return new BooleanProductAttributeValue(name,
value != null && value.Contains("true", StringComparer.InvariantCultureIgnoreCase));
case nameof(NumericProductAttributeField):
if (decimal.TryParse(value.FirstOrDefault(), out decimal decimalValue))
{
return new NumericProductAttributeValue(name, decimalValue);
}
return new NumericProductAttributeValue(name, null);
case nameof(TextProductAttributeField):
// TODO: use settings to validate the value, and parse differently if multiple values are allowed.
return new TextProductAttributeValue(name, value);
default:
return null;
}
}
}
}
<|start_filename|>OrchardCore.Commerce/Assets.json<|end_filename|>
[
{
"inputs": [
"Assets/js/commerce-regions.js"
],
"output": "wwwroot/Scripts/commerce-regions.js"
}
]
<|start_filename|>OrchardCore.Commerce/Settings/PricePartSettings.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Text;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
namespace OrchardCore.Commerce.Settings
{
[JsonObject]
public class PricePartSettings
{
[JsonConverter(typeof(StringEnumConverter))]
public CurrencySelectionModeEnum CurrencySelectionMode { get; set; }
public string SpecificCurrencyIsoCode { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/IPayment.cs<|end_filename|>
using Money;
namespace OrchardCore.Commerce.Abstractions
{
public interface IPayment
{
/// <summary>
/// The kind of charge, such as "Credit Card", "Cash", "Bitcoin", etc.
/// </summary>
string Kind { get; }
/// <summary>
/// A unique ID for the transaction. The semantics of this can vary by provider.
/// </summary>
string TransactionId { get; }
/// <summary>
/// Text accompanying the charge. The semantics of this can vary by provider.
/// </summary>
string ChargeText { get; }
/// <summary>
/// Amount charged.
/// </summary>
Amount Amount { get; }
}
}
<|start_filename|>OrchardCore.Commerce/Views/NumericProductAttributeFieldSettings.Edit.cshtml<|end_filename|>
@model OrchardCore.Commerce.Settings.NumericProductAttributeFieldSettings
@using System.Globalization
@{
var step = Math.Pow(10, 0 - Model.DecimalPlaces);
var stepAttribute = step.ToString(CultureInfo.InvariantCulture);
string minimum = (Model.Minimum.HasValue) ? Math.Round(Model.Minimum.Value, Model.DecimalPlaces).ToString(CultureInfo.InvariantCulture) : "";
string maximum = (Model.Maximum.HasValue) ? Math.Round(Model.Maximum.Value, Model.DecimalPlaces).ToString(CultureInfo.InvariantCulture) : "";
}
<div class="form-group">
<div class="custom-control custom-checkbox">
<input asp-for="Required" type="checkbox" class="custom-control-input">
<label class="custom-control-label" asp-for="Required">@T["Required"]</label>
<span class="hint">@T["— Whether a value is required."]</span>
</div>
</div>
<div class="form-group">
<div class="row col-md">
<label asp-for="Hint">@T["Hint"]</label>
<textarea asp-for="Hint" rows="2" class="form-control"></textarea>
<span class="hint">@T["The hint text to display for this attribute in the product page."]</span>
</div>
</div>
<div class="form-group">
<div class="row col-md">
<label for="Placeholder">@T["Watermark (placeholder)"]</label>
<input asp-for="Placeholder" type="text" class="form-control" />
<span class="hint">@T["A hint to display when the input is empty. (optional)"]</span>
</div>
</div>
<div class="form-group">
<div class="row col-sm">
<label for="DecimalPlaces">@T["Decimal places"]</label>
<input asp-for="DecimalPlaces" class="form-control affects-numeric-product-attr-default" min="0" max="5" step="1" type="number"
data-related-node="@Html.IdFor(m => m.DefaultValue)" />
<span class="hint">@T["The number of digits after the decimal point."]</span>
</div>
</div>
<div class="form-group">
<div class="row col-sm">
<label for="Minimum">@T["Minimum"]</label>
<input asp-for="Minimum" class="form-control affects-numeric-product-attr-default"
data-related-node="@Html.IdFor(m => m.DefaultValue)" />
<span class="hint">@T["The minimum value allowed. (optional)"]</span>
</div>
</div>
<div class="form-group">
<div class="row col-sm">
<label for="Maximum">@T["Maximum"]</label>
<input asp-for="Maximum" class="form-control affects-numeric-product-attr-default"
data-related-node="@Html.IdFor(m => m.DefaultValue)" />
<span class="hint">@T["The maximum value allowed. (optional)"]</span>
</div>
</div>
<div class="form-group">
<div class="row col-sm">
<label for="DefaultValue">@T["Default value"]</label>
<input asp-for="DefaultValue" class="form-control" min="@minimum" max="@maximum" step="@stepAttribute" type="number"
data-related-min="@Html.IdFor(m => m.Minimum)" data-related-max="@Html.IdFor(m => m.Maximum)"
data-related-decimal-places="@Html.IdFor(m => m.DecimalPlaces)"/>
<span class="hint">@T["The default value. (optional)"]</span>
</div>
</div>
<script at="Foot">
$(function () {
$('.affects-numeric-product-attr-default').on('change', function (e) {
var defaultValueField = $('#' + $(e.target).data('related-node'));
var decimalPlaces = $('#' + defaultValueField.data('related-decimal-places')).val() || 0;
var step = Math.pow(10, 0 - decimalPlaces);
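// e.g. decimalPlaces = 2 gives step = 0.01; the min/max below are then rounded to that step.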
if (!isNaN(step)) defaultValueField.attr('step', step);
var min = Math.round(parseFloat($('#' + defaultValueField.data('related-min')).val()) / step) * step;
if (!isNaN(min)) defaultValueField.attr("min", min);
var max = Math.round(parseFloat($('#' + defaultValueField.data('related-max')).val()) / step) * step;
if (!isNaN(max)) defaultValueField.attr("max", max);
});
});
</script>
<|start_filename|>OrchardCore.Commerce/Migrations/PriceVariantsMigrations.cs<|end_filename|>
using OrchardCore.ContentManagement.Metadata.Settings;
using OrchardCore.ContentManagement.Metadata;
using OrchardCore.Data.Migration;
namespace OrchardCore.Commerce.Migrations
{
/// <summary>
/// Adds the price variants part to the list of available parts.
/// </summary>
public class PriceVariantsMigrations : DataMigration
{
IContentDefinitionManager _contentDefinitionManager;
public PriceVariantsMigrations(IContentDefinitionManager contentDefinitionManager)
{
_contentDefinitionManager = contentDefinitionManager;
}
public int Create()
{
_contentDefinitionManager.AlterPartDefinition("PriceVariantsPart", builder => builder
.Attachable()
.WithDescription("A product variants prices based on predefined attributes."));
return 1;
}
}
}
<|start_filename|>MoneyDataType/Abstractions/ICurrency.cs<|end_filename|>
using System;
namespace Money.Abstractions
{
/// <summary>
/// Currency representation
/// </summary>
public interface ICurrency : IEquatable<ICurrency>
{
/// <summary>
/// The symbol for the currency, usually a single character
/// </summary>
string Symbol { get; }
/// <summary>
/// The full native name of the currency
/// </summary>
string NativeName { get; }
/// <summary>
/// The full english name of the currency
/// </summary>
string EnglishName { get; }
/// <summary>
/// The three-letter ISO 4217 code for the currency, if one exists
/// (for non-standardized currencies such as crypto-currencies, follow common usage)
/// </summary>
string CurrencyIsoCode { get; }
/// <summary>
/// The number of significant decimal places after the decimal separator
/// </summary>
int DecimalPlaces { get; }
/// <summary>
/// Formats an amount of the currency
/// </summary>
/// <param name="amount">The amount</param>
/// <returns>The formatted amount of the currency</returns>
string ToString(decimal amount);
}
}
<|start_filename|>OrchardCore.Commerce/Services/CreditCardPaymentProvider.cs<|end_filename|>
using System.Collections.Generic;
using System.Globalization;
using Microsoft.Extensions.Localization;
using Money;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Services
{
public class CreditCardPaymentProvider : IPaymentProvider
{
private const string Last4 = nameof(Last4);
private const string ExpirationMonth = nameof(ExpirationMonth);
private const string ExpirationYear = nameof(ExpirationYear);
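// AddData and CreateCharge round-trip a credit card payment through a string
// dictionary keyed by the constants above, e.g. (values illustrative):
// { "Last4": "4242", "ExpirationMonth": "12", "ExpirationYear": "2030" }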
private readonly IStringLocalizer S;
public CreditCardPaymentProvider(IStringLocalizer<CreditCardPaymentProvider> localizer)
{
S = localizer;
}
public void AddData(IPayment charge, IDictionary<string, string> data)
{
if (charge is CreditCardPayment creditCardCharge)
{
data[Last4] = creditCardCharge.Last4;
data[ExpirationMonth] = creditCardCharge.ExpirationMonth.ToString(CultureInfo.InvariantCulture);
data[ExpirationYear] = creditCardCharge.ExpirationYear.ToString(CultureInfo.InvariantCulture);
}
}
public IPayment CreateCharge(string kind, string transactionId, Amount amount, IDictionary<string, string> data)
=> kind == CreditCardPayment.CreditCardKind ?
new CreditCardPayment
{
TransactionId = transactionId,
Amount = amount,
ChargeText = S["Card **** **** **** {0} expiring {1}/{2}.", data[Last4], data[ExpirationMonth], data[ExpirationYear]].ToString(),
Last4 = data[Last4],
ExpirationMonth = int.TryParse(data[ExpirationMonth], out int expMonth) && expMonth >= 1 && expMonth <= 12 ? expMonth : 0,
ExpirationYear = int.TryParse(data[ExpirationYear], out int expYear) && expYear >= 0 ? expYear : 0
} : null;
}
}
<|start_filename|>OrchardCore.Commerce/Drivers/PriceVariantsPartDisplayDriver.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Money;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.Commerce.ViewModels;
using OrchardCore.ContentManagement.Display.ContentDisplay;
using OrchardCore.ContentManagement.Display.Models;
using OrchardCore.DisplayManagement.ModelBinding;
using OrchardCore.DisplayManagement.Views;
namespace OrchardCore.Commerce.Drivers
{
public class PriceVariantsPartDisplayDriver : ContentPartDisplayDriver<PriceVariantsPart>
{
private readonly IMoneyService _moneyService;
private readonly IPredefinedValuesProductAttributeService _predefinedValuesProductAttributeService;
public PriceVariantsPartDisplayDriver(IMoneyService moneyService, IPredefinedValuesProductAttributeService predefinedValuesProductAttributeService)
{
_moneyService = moneyService;
_predefinedValuesProductAttributeService = predefinedValuesProductAttributeService;
}
public override IDisplayResult Display(PriceVariantsPart part, BuildPartDisplayContext context)
{
return Initialize<PriceVariantsPartViewModel>(GetDisplayShapeType(context), m => BuildViewModel(m, part))
.Location("Detail", "Content:25")
.Location("Summary", "Meta:10");
}
public override IDisplayResult Edit(PriceVariantsPart part, BuildPartEditorContext context)
{
return Initialize<PriceVariantsPartViewModel>(GetEditorShapeType(context), m =>
{
BuildViewModel(m, part);
m.Currencies = _moneyService.Currencies;
});
}
public override async Task<IDisplayResult> UpdateAsync(PriceVariantsPart part, IUpdateModel updater, UpdatePartEditorContext context)
{
var updateModel = new PriceVariantsPartViewModel();
if (await updater.TryUpdateModelAsync(updateModel, Prefix, t => t.VariantsValues, t => t.VariantsCurrencies))
{
// Remove any content or the variants would be merged and not be cleared
part.Content.Variants.RemoveAll();
part.Variants = updateModel.VariantsValues
.Where(x => x.Value.HasValue
&& updateModel.VariantsCurrencies?.ContainsKey(x.Key) == true
&& updateModel.VariantsCurrencies[x.Key] != Currency.UnspecifiedCurrency.CurrencyIsoCode)
.ToDictionary(x => x.Key,
x => _moneyService.Create(x.Value.Value, updateModel.VariantsCurrencies[x.Key]));
}
return Edit(part, context);
}
private Task BuildViewModel(PriceVariantsPartViewModel model, PriceVariantsPart part)
{
model.ContentItem = part.ContentItem;
model.PriceVariantsPart = part;
var allVariantsKeys = _predefinedValuesProductAttributeService.GetProductAttributesCombinations(part.ContentItem);
model.Variants = part.Variants ?? new Dictionary<string, Amount>();
model.VariantsValues = allVariantsKeys.ToDictionary(x => x,
x => model.Variants.TryGetValue(x, out var amount)
? new decimal?(amount.Value)
: null);
model.VariantsCurrencies = allVariantsKeys.ToDictionary(x => x,
x => model.Variants.TryGetValue(x, out var amount)
? amount.Currency.CurrencyIsoCode
: Currency.UnspecifiedCurrency.CurrencyIsoCode);
return Task.CompletedTask;
}
}
}
<|start_filename|>MoneyDataType/KnownCurrencyTable.cs<|end_filename|>
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Money.Abstractions;
namespace Money
{
internal static class KnownCurrencyTable
{
private static readonly object Obj = new object();
internal static IDictionary<string, ICurrency> CurrencyTable { get; private set; }
internal static void EnsureCurrencyTable()
{
if (CurrencyTable == null)
InitCurrencyCodeTable();
}
private class CurrencyEqualityComparer : IEqualityComparer<ICurrency>
{
public bool Equals(ICurrency x, ICurrency y) => x.CurrencyIsoCode == y.CurrencyIsoCode;
public int GetHashCode(ICurrency obj) => obj.CurrencyIsoCode.GetHashCode();
}
private static void InitCurrencyCodeTable()
{
lock (Obj)
{
bool valid(CultureInfo c) => !c.IsNeutralCulture && !c.EnglishName.StartsWith("Unknown Locale") && !c.EnglishName.StartsWith("Invariant Language");
CurrencyTable = CultureInfo.GetCultures(CultureTypes.AllCultures)
.Where(valid)
.Select(c => new Currency(c)).Cast<ICurrency>()
.Distinct(new CurrencyEqualityComparer())
.ToDictionary(k => k.CurrencyIsoCode, e => e);
CurrencyTable.Add("BTC", new Currency("BitCoin", "BitCoin", "₿", "BTC", 8));
CurrencyTable.Add("---", new Currency("Unspecified", "Unspecified", "---", "---"));
}
}
internal static ICurrency FromIsoCode(string isoCode)
{
EnsureCurrencyTable();
return CurrencyTable[isoCode];
}
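// Illustrative lookups: FromIsoCode("BTC") and FromIsoCode("---") return the entries
// added above; culture-derived codes such as "USD" resolve from the same table.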
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/IPaymentProvider.cs<|end_filename|>
using System.Collections.Generic;
using Money;
namespace OrchardCore.Commerce.Abstractions
{
public interface IPaymentProvider
{
public IPayment CreateCharge(string kind, string transactionId, Amount amount, IDictionary<string, string> data);
public void AddData(IPayment charge, IDictionary<string, string> data);
}
}
<|start_filename|>OrchardCore.Commerce/Settings/PricePartSettingsDisplayDriver.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.Extensions.Localization;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.Commerce.ViewModels;
using OrchardCore.ContentManagement.Metadata.Models;
using OrchardCore.ContentTypes.Editors;
using OrchardCore.DisplayManagement.ModelBinding;
using OrchardCore.DisplayManagement.Views;
namespace OrchardCore.Commerce.Settings
{
public class PricePartSettingsDisplayDriver : ContentTypePartDefinitionDisplayDriver
{
private readonly IStringLocalizer<PricePartSettingsDisplayDriver> S;
private readonly IMoneyService _moneyService;
public PricePartSettingsDisplayDriver(IStringLocalizer<PricePartSettingsDisplayDriver> localizer, IMoneyService moneyService)
{
S = localizer;
_moneyService = moneyService;
}
public override IDisplayResult Edit(ContentTypePartDefinition contentTypePartDefinition, IUpdateModel updater)
{
if (!String.Equals(nameof(PricePart), contentTypePartDefinition.PartDefinition.Name))
{
return null;
}
return Initialize<PricePartSettingsViewModel>("PricePartSettings_Edit", model =>
{
var settings = contentTypePartDefinition.GetSettings<PricePartSettings>();
model.CurrencySelectionMode = settings.CurrencySelectionMode;
model.CurrencySelectionModes = new List<SelectListItem>()
{
new SelectListItem(CurrencySelectionModeEnum.AllCurrencies.ToString(), S["All Currencies"]),
new SelectListItem(CurrencySelectionModeEnum.DefaultCurrency.ToString(), S["Default Currency"]),
new SelectListItem(CurrencySelectionModeEnum.SpecificCurrency.ToString(), S["Specific Currency"])
};
model.SpecificCurrencyIsoCode = settings.SpecificCurrencyIsoCode;
model.Currencies = _moneyService.Currencies
.OrderBy(c => c.CurrencyIsoCode)
.Select(c => new SelectListItem(
c.CurrencyIsoCode,
$"{c.CurrencyIsoCode} {c.Symbol} - {S[c.EnglishName]}"));
}).Location("Content");
}
public override async Task<IDisplayResult> UpdateAsync(ContentTypePartDefinition contentTypePartDefinition, UpdateTypePartEditorContext context)
{
if (!String.Equals(nameof(PricePart), contentTypePartDefinition.PartDefinition.Name))
{
return null;
}
var model = new PricePartSettingsViewModel();
await context.Updater.TryUpdateModelAsync(model, Prefix,
m => m.CurrencySelectionMode,
m => m.SpecificCurrencyIsoCode);
context.Builder.WithSettings(new PricePartSettings
{
CurrencySelectionMode = model.CurrencySelectionMode,
SpecificCurrencyIsoCode =
model.CurrencySelectionMode == CurrencySelectionModeEnum.SpecificCurrency
? model.SpecificCurrencyIsoCode : null
});
return Edit(contentTypePartDefinition, context.Updater);
}
}
}
<|start_filename|>OrchardCore.Commerce/Assets/js/commerce-regions.js<|end_filename|>
var commerceRegions;
function commerceRegionsInitialize(regionData) {
commerceRegions = regionData;
}
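// Assumed regionData shape (illustrative): region codes mapping to province
// code/name pairs, e.g. { "US": { "CA": "California", "NY": "New York" } }.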
function commerceRegionsBind(provinceDropDown, regionDropDown) {
$(regionDropDown).change(function () {
commerceRegionsOnChange(provinceDropDown, regionDropDown);
});
}
function commerceRegionsOnChange(provinceDropDown, regionDropDown) {
var provinceEl = $(provinceDropDown);
provinceEl.empty();
var regionName = $(regionDropDown).val();
var region = commerceRegions[regionName];
if (region) {
$.each(Object.getOwnPropertyNames(region), function () {
provinceEl.append($("<option/>").val(this).text(region[this]));
});
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/ProductAttributeDescription.cs<|end_filename|>
using OrchardCore.Commerce.Fields;
using OrchardCore.Commerce.Settings;
namespace OrchardCore.Commerce.Abstractions
{
public class ProductAttributeDescription
{
public ProductAttributeDescription(string name, string partName, ProductAttributeField field, ProductAttributeFieldSettings settings)
{
Name = name;
PartName = partName;
Field = field;
Settings = settings;
}
public string Name { get; }
public string PartName { get; }
public ProductAttributeField Field { get; }
public ProductAttributeFieldSettings Settings { get; }
}
}
<|start_filename|>OrchardCore.Commerce/Serialization/ShoppingCartItemConverter.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Text.Json;
using System.Text.Json.Serialization;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.Commerce.ProductAttributeValues;
namespace OrchardCore.Commerce.Serialization
{
internal class ShoppingCartItemConverter : JsonConverter<ShoppingCartItem>
{
private const string quantityName = "quantity";
private const string skuName = "sku";
private const string pricesName = "prices";
private const string attributesName = "attributes";
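// Illustrative wire format read and written by this converter (values are examples;
// the nested amount object is whatever the Amount serializer emits):
// {
//   "quantity": 2,
//   "sku": "product-1",
//   "prices": [ { "priority": 0, "amount": { ... } } ],
//   "attributes": { "ProductPart3.attr1": true }
// }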
public override ShoppingCartItem Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
var quantity = 1;
string sku = null;
ISet<IProductAttributeValue> attributes = null;
IList<PrioritizedPrice> prices = null;
while (reader.Read())
{
if (reader.TokenType != JsonTokenType.PropertyName) break;
var propertyName = reader.GetString();
if (!reader.Read()) continue;
switch (propertyName)
{
case quantityName:
quantity = reader.GetInt32();
break;
case skuName:
sku = reader.GetString();
break;
case pricesName:
prices = JsonSerializer.Deserialize<List<PrioritizedPrice>>(ref reader);
break;
case attributesName:
attributes = new HashSet<IProductAttributeValue>();
while (reader.TokenType != JsonTokenType.EndObject)
{
reader.Read();
if (reader.TokenType != JsonTokenType.PropertyName) continue;
var attributeName = reader.GetString();
var value = JsonSerializer.Deserialize<RawProductAttributeValue>(ref reader)
?? new RawProductAttributeValue(null); // Deserialize can return null here (apparently a System.Text.Json quirk), so fall back to an empty raw value.
value.SetAttributeName(attributeName);
attributes.Add(value);
}
break;
}
}
return new ShoppingCartItem(quantity, sku, attributes, prices);
}
public override void Write(Utf8JsonWriter writer, ShoppingCartItem value, JsonSerializerOptions options)
{
writer.WriteStartObject();
writer.WriteNumber(quantityName, value.Quantity);
writer.WriteString(skuName, value.ProductSku);
if (value.Prices != null)
{
writer.WritePropertyName(pricesName);
JsonSerializer.Serialize(writer, value.Prices, options);
}
if (value.Attributes != null)
{
writer.WriteStartObject(attributesName);
foreach (var attribute in value.Attributes)
{
writer.WritePropertyName(attribute.AttributeName);
// Re-using the raw attribute serialization logic
JsonSerializer.Serialize(writer, new RawProductAttributeValue(attribute.UntypedValue), options);
}
writer.WriteEndObject();
}
writer.WriteEndObject();
}
}
}
<|start_filename|>OrchardCore.Commerce/Views/NumericProductAttributeField.cshtml<|end_filename|>
@using OrchardCore.Commerce.Settings
@using OrchardCore.Commerce.Abstractions
@{
int index = Model.Index;
var attr = Model.AttributeDescription as ProductAttributeDescription;
var settings = attr.Settings as NumericProductAttributeFieldSettings;
var id = $"{Model.IdPrefix}-attribute-{index}";
}
<label for="@id">@attr.Name</label>
<input type="hidden" name="line.Attributes[@index].Key" value="@attr.PartName.@attr.Name" />
<input id="@id" type="number" name="line.Attributes[@index].Value" value="@(settings.DefaultValue.ToString() ?? "")"
min="@settings.Minimum" max="@settings.Maximum" step="@Math.Pow(10, -settings.DecimalPlaces)"
placeholder="@settings.Placeholder" required="@settings.Required" class="form-control" />
@if (settings.Hint != null)
{
<div class="hint form-text text-muted">@settings.Hint</div>
}
<|start_filename|>OrchardCore.Commerce/Migrations/OrderMigrations.cs<|end_filename|>
using OrchardCore.ContentManagement.Metadata.Settings;
using OrchardCore.ContentManagement.Metadata;
using OrchardCore.Data.Migration;
using OrchardCore.Commerce.Fields;
using OrchardCore.Commerce.Settings;
namespace OrchardCore.Commerce.Migrations
{
/// <summary>
/// Adds the order part to the list of available parts and the address field to the list of available fields.
/// </summary>
public class OrderMigrations : DataMigration
{
IContentDefinitionManager _contentDefinitionManager;
public OrderMigrations(IContentDefinitionManager contentDefinitionManager)
{
_contentDefinitionManager = contentDefinitionManager;
}
public int Create()
{
_contentDefinitionManager.AlterPartDefinition("OrderPart", builder => builder
.Attachable()
.WithDescription("Makes a content item into an order."));
_contentDefinitionManager.MigrateFieldSettings<AddressField, AddressPartFieldSettings>();
return 1;
}
}
}
<|start_filename|>OrchardCore.Commerce/Serialization/RawProductAttributeValueConverter.cs<|end_filename|>
using System;
using System.Text.Json;
using System.Text.Json.Serialization;
using OrchardCore.Commerce.ProductAttributeValues;
namespace OrchardCore.Commerce.Serialization
{
internal class RawProductAttributeValueConverter : JsonConverter<RawProductAttributeValue>
{
public override RawProductAttributeValue Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
=> new RawProductAttributeValue(JsonSerializer.Deserialize<object>(ref reader, options));
public override void Write(Utf8JsonWriter writer, RawProductAttributeValue value, JsonSerializerOptions options)
=> JsonSerializer.Serialize(writer, value.UntypedValue, options);
}
}
<|start_filename|>OrchardCore.Commerce/Fields/ProductAttributeField.cs<|end_filename|>
using OrchardCore.Commerce.Settings;
using OrchardCore.ContentManagement;
using OrchardCore.ContentManagement.Metadata.Models;
namespace OrchardCore.Commerce.Fields
{
public abstract class ProductAttributeField : ContentField
{
public ProductAttributeField() { }
}
/// <summary>
/// Adds the ability for a product to be modified with a set of attributes, in particular when
/// added to a shopping cart.
/// Examples of attributes can be shirt sizes (S, M, L, XL), dimensions, etc.
/// </summary>
public abstract class ProductAttributeField<TSettings> : ProductAttributeField where TSettings : ProductAttributeFieldSettings, new()
{
public TSettings GetSettings(ContentPartFieldDefinition partFieldDefinition)
{
var settings = new TSettings();
partFieldDefinition.PopulateSettings(settings);
return settings;
}
}
/// <summary>
/// A Boolean product attribute
/// </summary>
public class BooleanProductAttributeField : ProductAttributeField<BooleanProductAttributeFieldSettings> { }
/// <summary>
/// A numeric product attribute
/// </summary>
public class NumericProductAttributeField : ProductAttributeField<NumericProductAttributeFieldSettings> { }
/// <summary>
/// A text product attribute, that may also have predefined values and be used as enumerations or flags
/// </summary>
public class TextProductAttributeField : ProductAttributeField<TextProductAttributeFieldSettings> { }
}
<|start_filename|>OrchardCore.Commerce/Views/AddressField_Edit.cshtml<|end_filename|>
@model OrchardCore.Commerce.ViewModels.AddressFieldViewModel
@using OrchardCore.Commerce.Settings
@{
var settings = Model.PartFieldDefinition.GetSettings<AddressPartFieldSettings>();
}
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.Name">@T["Name"]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.Name">
<input asp-for="Address.Name" class="form-control content-preview-text" required="required" />
</div>
</div>
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.Department">@T["Department"]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.Department">
<input asp-for="Address.Department" class="form-control content-preview-text" />
</div>
</div>
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.Company">@T["Company or institution"]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.Company">
<input asp-for="Address.Company" class="form-control content-preview-text" />
</div>
</div>
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.StreetAddress1">@T["Street address"]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.StreetAddress1">
<input asp-for="Address.StreetAddress1" class="form-control content-preview-text" required="required" />
</div>
</div>
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.StreetAddress2">@T[""]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.StreetAddress2">
<input asp-for="Address.StreetAddress2" class="form-control content-preview-text" />
</div>
</div>
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.City">@T["City"]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.City">
<input asp-for="Address.City" class="form-control content-preview-text" required="required" />
</div>
</div>
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.Province">@T["State or province"]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.Province">
<select asp-for="Address.Province" class="form-control content-preview-text" required="required"></select>
</div>
</div>
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.PostalCode">@T["Postal code"]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.PostalCode">
<input asp-for="Address.PostalCode" class="form-control content-preview-text" />
</div>
</div>
<div class="row">
<div class="form-group col-md-12">
<label asp-for="Address.Region">@T["Country or region"]</label>
</div>
<div class="form-group col-md-6" asp-validation-class-for="Address.Region">
<select asp-for="Address.Region"
asp-items="@Model.Regions.OrderBy(region => region.DisplayName).Select(region => new SelectListItem(
region.DisplayName,
region.TwoLetterISORegionName))"
class="form-control content-preview-text" required="required"></select>
</div>
</div>
@if (!string.IsNullOrWhiteSpace(settings.Hint))
{
<span class="hint">@settings.Hint</span>
}
<script asp-name="commerce-regions" asp-src="~/OrchardCore.Commerce/Scripts/commerce-regions.js" at="Foot" depends-on="jQuery"></script>
<script at="Foot" depends-on="commerce-regions">
@if (Context.Items["commerce-regions-initialized"] == null)
{
<text>commerceRegionsInitialize(@Html.Raw(Newtonsoft.Json.JsonConvert.SerializeObject(Model.Provinces)));</text>
Context.Items["commerce-regions-initialized"] = true;
}
commerceRegionsBind("#@Html.IdFor(m => m.Address.Province)", "#@Html.IdFor(m => m.Address.Region)");
</script>
<|start_filename|>OrchardCore.Commerce/Abstractions/IPriceProvider.cs<|end_filename|>
using System.Collections.Generic;
using System.Threading.Tasks;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Abstractions
{
/// <summary>
/// Price providers add prices to shopping cart items.
/// </summary>
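/// <example>
/// A minimal sketch of an implementation (hypothetical, not part of this module;
/// it assumes ShoppingCartItem exposes a WithPrice helper that returns an updated copy):
/// <code>
/// public class FlatPriceProvider : IPriceProvider
/// {
///     public int Order => 0;
///     public Task<IEnumerable<ShoppingCartItem>> AddPrices(IEnumerable<ShoppingCartItem> items)
///         => Task.FromResult(items.Select(item =>
///             item.WithPrice(new PrioritizedPrice(0, new Amount(10, Currency.USDollar)))));
/// }
/// </code>
/// </example>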
public interface IPriceProvider
{
/// <summary>
/// Adds prices to shopping cart items.
/// </summary>
/// <param name="items">The quantities and products to which prices must be added.</param>
Task<IEnumerable<ShoppingCartItem>> AddPrices(IEnumerable<ShoppingCartItem> items);
/// <summary>
/// The position of this provider among all price providers; providers are invoked in increasing order of this value.
/// </summary>
int Order { get; }
}
}
<|start_filename|>OrchardCore.Commerce/Views/NumericProductAttributeField_Edit.cshtml<|end_filename|>
@model OrchardCore.Commerce.ViewModels.EditProductAttributeFieldViewModel<NumericProductAttributeField, NumericProductAttributeFieldSettings>
@using OrchardCore.ContentManagement.Metadata.Models
@using OrchardCore.Commerce.Fields
@using OrchardCore.Commerce.Settings
@{
var settings = Model.Settings;
string name = Model.PartFieldDefinition.DisplayName();
}
<fieldset class="form-group">
<label>@name @T["(Numeric product attribute)"]</label>
<ul>
@if (settings.Required) {
<li class="hint">@T["Required"]</li>
}
<li class="hint">@T["Decimal places: {0}", settings.DecimalPlaces]</li>
@if (settings.DefaultValue.HasValue) {
<li class="hint">@T["Default value: {0}", settings.DefaultValue.Value]</li>
}
@if (settings.Minimum.HasValue && settings.Maximum.HasValue) {
<li class="hint">@T["Value must be between {0} and {1}", settings.Minimum.Value, settings.Maximum.Value]</li>
} else if (settings.Minimum.HasValue) {
<li class="hint">@T["Minimum value: {0}", settings.Minimum.Value]</li>
} else if (settings.Maximum.HasValue) {
<li class="hint">@T["Maximum value: {0}", settings.Maximum.Value]</li>
}
<li class="hint">@T["Placeholder: {0}", settings.Placeholder]</li>
<li class="hint">@T["Hint: {0}", settings.Hint]</li>
</ul>
</fieldset>
<|start_filename|>OrchardCore.Commerce/Views/AddressField.cshtml<|end_filename|>
@model OrchardCore.Commerce.ViewModels.AddressFieldViewModel
@Model.AddressHtml
<|start_filename|>OrchardCore.Commerce/Models/PriceVariantsPart.cs<|end_filename|>
using System.Collections.Generic;
using Money;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.Models
{
/// <summary>
/// A product's variant prices, based on predefined attributes.
/// </summary>
public class PriceVariantsPart : ContentPart
{
public Dictionary<string, Amount> Variants { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Views/ProductPart.Edit.cshtml<|end_filename|>
@model ProductPartViewModel
<div class="form-group" asp-validation-class-for="Sku">
<label asp-for="Sku">@T["SKU"] </label>
<input asp-for="Sku" class="form-control text-muted" />
<span asp-validation-for="Sku"></span>
<span class="hint">
@T["The SKU of the product."]
</span>
</div>
<|start_filename|>OrchardCore.Commerce/Models/CreditCardPayment.cs<|end_filename|>
using Money;
using OrchardCore.Commerce.Abstractions;
namespace OrchardCore.Commerce.Models
{
public class CreditCardPayment : IPayment
{
public static readonly string CreditCardKind = "Credit Card";
public string Kind => CreditCardKind;
public string TransactionId { get; set; }
public string ChargeText { get; set; }
public Amount Amount { get; set; }
/// <summary>
/// The last four digits of the credit card number.
/// </summary>
public string Last4 { get; set; }
/// <summary>
/// The expiration month of the credit card.
/// </summary>
public int ExpirationMonth { get; set; }
/// <summary>
/// The expiration year of the credit card.
/// </summary>
public int ExpirationYear { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/ProductAttributeValues/BaseProductAttributeValue.cs<|end_filename|>
using System;
using System.Globalization;
using OrchardCore.Commerce.Abstractions;
namespace OrchardCore.Commerce.ProductAttributeValues
{
public class BaseProductAttributeValue<T> : IProductAttributeValue<T>
{
public BaseProductAttributeValue(string attributeName, T value)
{
AttributeName = attributeName;
Value = value;
}
public virtual T Value { get; }
public virtual string AttributeName { get; protected set; }
public object UntypedValue => Value;
public virtual string Display(CultureInfo culture = null)
=> FieldName + ": " + Convert.ToString(Value, culture ?? CultureInfo.InvariantCulture);
public virtual bool Equals(IProductAttributeValue<T> other)
=> other != null
&& AttributeName == other.AttributeName
&& (Value == null && other.Value == null || Value.Equals(other.Value));
public override bool Equals(object obj) => obj != null && obj is IProductAttributeValue<T> other && Equals(other);
public override int GetHashCode() => (AttributeName, Value).GetHashCode();
public override string ToString() => AttributeName + ": " + Value;
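// For an AttributeName such as "ProductPart3.attr1", FieldName is the text after
// the first dot ("attr1"); names without a dot are returned unchanged.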
public string FieldName
{
get
{
int dot = AttributeName.IndexOf('.');
if (dot == -1 || dot + 1 == AttributeName.Length) return AttributeName;
return AttributeName.Substring(dot + 1);
}
}
}
}
<|start_filename|>OrchardCore.Commerce/wwwroot/Scripts/commerce-regions.js<|end_filename|>
/*
** NOTE: This file is generated by Gulp and should not be edited directly!
** Any changes made directly to this file will be overwritten next time its asset group is processed by Gulp.
*/
var commerceRegions;
function commerceRegionsInitialize(regionData) {
commerceRegions = regionData;
}
function commerceRegionsBind(provinceDropDown, regionDropDown) {
$(regionDropDown).change(function () {
commerceRegionsOnChange(provinceDropDown, regionDropDown);
});
}
function commerceRegionsOnChange(provinceDropDown, regionDropDown) {
var provinceEl = $(provinceDropDown);
provinceEl.empty();
var regionName = $(regionDropDown).val();
var region = commerceRegions[regionName];
if (region) {
$.each(Object.getOwnPropertyNames(region), function () {
provinceEl.append($("<option/>").val(this).text(region[this]));
});
}
}
<|start_filename|>OrchardCore.Commerce/Views/PricePartSettings.Edit.cshtml<|end_filename|>
@model PricePartSettingsViewModel
<fieldset class="form-group" asp-validation-class-for="PricePartSettings">
<label asp-for="CurrencySelectionMode">@T["Currency selection mode"]</label>
<div class="input-group">
<div class="input-group-append">
<select asp-for="CurrencySelectionMode"
asp-items="@(new SelectList(Model.CurrencySelectionModes, "Text", "Value"))"></select>
</div>
</div>
<span class="hint">@T["Specifies the currency configuration for this PricePart."]</span>
<fieldset class="input-group" id="@Html.IdFor(m => m.SpecificCurrencyIsoCode)-Group">
<div class="input-group">
<div class="input-group-append">
<select asp-for="SpecificCurrencyIsoCode"
asp-items="@(new SelectList(Model.Currencies, "Text", "Value"))"></select>
</div>
</div>
<span class="hint">@T["Currency used for specific currency selection mode."]</span>
</fieldset>
</fieldset>
<script at="Foot">
$(function () {
$(document).ready(function () {
SpecificCurrencyGroupVisibility($('#@Html.IdFor(m => m.CurrencySelectionMode)'), $('#@Html.IdFor(m => m.SpecificCurrencyIsoCode)-Group'));
});
$('#@Html.IdFor(m => m.CurrencySelectionMode)').change(function () {
SpecificCurrencyGroupVisibility($(this), $('#@Html.IdFor(m => m.SpecificCurrencyIsoCode)-Group'));
});
function SpecificCurrencyGroupVisibility(element, group) {
if ('@Html.ValueFor(m => m.SingleSelectionModeEditor)' === element.val()) {
group.show();
} else {
group.hide();
}
}
});
</script>
<|start_filename|>OrchardCore.Commerce/Handlers/PricePartHandler.cs<|end_filename|>
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.ContentManagement.Handlers;
namespace OrchardCore.Commerce.Handlers
{
public class PricePartHandler : ContentPartHandler<PricePart>
{
private readonly IMoneyService _moneyService;
public PricePartHandler(IMoneyService moneyService)
{
_moneyService = moneyService;
}
public override Task LoadingAsync(LoadContentContext context, PricePart part)
{
part.Price = _moneyService.EnsureCurrency(part.Price);
return base.LoadingAsync(context, part);
}
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/IProductAttributeProvider.cs<|end_filename|>
using System.Text.Json;
using OrchardCore.ContentManagement.Metadata.Models;
namespace OrchardCore.Commerce.Abstractions
{
public interface IProductAttributeProvider
{
IProductAttributeValue Parse(
ContentTypePartDefinition partDefinition,
ContentPartFieldDefinition attributeFieldDefinition,
string[] value);
public IProductAttributeValue Parse(
ContentTypePartDefinition partDefinition,
ContentPartFieldDefinition attributeFieldDefinition,
string value)
=> Parse(partDefinition, attributeFieldDefinition, new[] { value });
IProductAttributeValue CreateFromJsonElement(
ContentTypePartDefinition partDefinition,
ContentPartFieldDefinition attributeFieldDefinition,
JsonElement value);
}
}
<|start_filename|>OrchardCore.Commerce/Views/BooleanProductAttributeFieldSettings.Edit.cshtml<|end_filename|>
@model OrchardCore.Commerce.Settings.BooleanProductAttributeFieldSettings
<div class="form-group">
<div class="row col-md">
<label asp-for="Hint">@T["Hint"]</label>
<textarea asp-for="Hint" rows="2" class="form-control"></textarea>
<span class="hint">@T["The description text to display for this attribute in the product page."]</span>
</div>
</div>
<div class="form-group">
<div class="row col-sm">
<label asp-for="Label">@T["Label"]</label>
<input asp-for="Label" class="form-control" />
<span class="hint">@T["The text associated to the checkbox for this attribute in the product page."]</span>
</div>
</div>
<div class="form-group">
<div class="row col-sm">
<label asp-for="Label">@T["Default value"]</label>
</div>
<div class="custom-control custom-checkbox">
<input asp-for="DefaultValue" type="checkbox" class="custom-control-input">
<label class="custom-control-label" asp-for="DefaultValue">@T["On/Off"]</label>
</div>
<div class="row col-md">
<span class="hint">@T["The default value associated with this attribute."]</span>
</div>
</div>
<|start_filename|>OrchardCore.Commerce/CommerceConstants.cs<|end_filename|>
namespace OrchardCore.Commerce
{
public static class CommerceConstants
{
public static class Features
{
public const string Core = "OrchardCore.Commerce";
public const string SessionCartStorage = "OrchardCore.Commerce.SessionCartStorage";
public const string CommerceSettingsCurrencySelector = "OrchardCore.Commerce.CommerceSettingsCurrencySelector";
}
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/IShoppingCartPersistence.cs<|end_filename|>
using System.Collections.Generic;
using System.Threading.Tasks;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Abstractions
{
public interface IShoppingCartPersistence
{
Task<ShoppingCart> Retrieve(string shoppingCartId = null);
Task Store(ShoppingCart items, string shoppingCartId = null);
string GetUniqueCartId(string shoppingCartId);
}
}
<|start_filename|>OrchardCore.Commerce/Migrations/ProductMigrations.cs<|end_filename|>
using OrchardCore.ContentManagement.Metadata.Settings;
using OrchardCore.ContentManagement.Metadata;
using OrchardCore.Data.Migration;
using OrchardCore.Commerce.Indexes;
using YesSql.Sql;
namespace OrchardCore.Commerce.Migrations
{
/// <summary>
/// Adds the product part to the list of available parts.
/// </summary>
public class ProductMigrations : DataMigration
{
IContentDefinitionManager _contentDefinitionManager;
public ProductMigrations(IContentDefinitionManager contentDefinitionManager)
{
_contentDefinitionManager = contentDefinitionManager;
}
public int Create()
{
_contentDefinitionManager.AlterPartDefinition("ProductPart", builder => builder
.Attachable()
.WithDescription("Makes a content item into a product."));
SchemaBuilder.CreateMapIndexTable<ProductPartIndex>(
table => table
.Column<string>("Sku", col => col.WithLength(128))
.Column<string>("ContentItemId", c => c.WithLength(26))
);
SchemaBuilder.AlterTable(nameof(ProductPartIndex), table => table
.CreateIndex("IDX_ProductPartIndex_Sku", "Sku")
);
return 1;
}
}
}
<|start_filename|>OrchardCore.Commerce/Serialization/PrioritizedPriceConverter.cs<|end_filename|>
using System;
using System.Text.Json;
using System.Text.Json.Serialization;
using Money;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Serialization
{
internal class PrioritizedPriceConverter : JsonConverter<PrioritizedPrice>
{
private const string PriorityName = "priority";
private const string AmountName = "amount";
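// Illustrative serialized form: { "priority": 0, "amount": { ... } }, where the
// nested object is whatever the Amount serializer emits.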
public override PrioritizedPrice Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
var priority = Int32.MinValue;
var amount = new Amount(0, Currency.UnspecifiedCurrency);
while (reader.Read())
{
if (reader.TokenType != JsonTokenType.PropertyName) break;
var propertyName = reader.GetString();
if (!reader.Read()) continue;
switch (propertyName)
{
case PriorityName:
priority = reader.GetInt32();
break;
case AmountName:
amount = JsonSerializer.Deserialize<Amount>(ref reader);
break;
}
}
if (priority > int.MinValue && !amount.Currency.Equals(Currency.UnspecifiedCurrency))
{
return new PrioritizedPrice(priority, amount);
}
return null;
}
public override void Write(Utf8JsonWriter writer, PrioritizedPrice prioritizedPrice, JsonSerializerOptions options)
{
writer.WriteStartObject();
writer.WriteNumber(PriorityName, prioritizedPrice.Priority);
writer.WritePropertyName(AmountName);
JsonSerializer.Serialize(writer, prioritizedPrice.Price, options);
writer.WriteEndObject();
}
}
}
<|start_filename|>OrchardCore.Commerce/ViewModels/PriceVariantsPartViewModel.cs<|end_filename|>
using System.Collections.Generic;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Money;
using Money.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.ViewModels
{
public class PriceVariantsPartViewModel
{
public Dictionary<string, decimal?> VariantsValues { get; set; }
public Dictionary<string, string> VariantsCurrencies { get; set; }
public IEnumerable<ICurrency> Currencies { get; set; }
[BindNever]
public ContentItem ContentItem { get; set; }
[BindNever]
public PriceVariantsPart PriceVariantsPart { get; set; }
[BindNever]
public Dictionary<string, Amount> Variants { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/ProductAttributeValues/RawProductAttributeValue.cs<|end_filename|>
using System.Text.Json.Serialization;
using OrchardCore.Commerce.Serialization;
namespace OrchardCore.Commerce.ProductAttributeValues
{
/// <summary>
/// Used only to deserialize attributes, before they're post-processed into concrete attribute values.
/// </summary>
[JsonConverter(typeof(RawProductAttributeValueConverter))]
internal class RawProductAttributeValue : BaseProductAttributeValue<object>
{
public RawProductAttributeValue(object value)
: base(null, value) { }
public void SetAttributeName(string name) => AttributeName = name;
}
}
<|start_filename|>OrchardCore.Commerce.Tests/Fakes/FakeCartStorage.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Tests.Fakes
{
public class FakeCartStorage : IShoppingCartPersistence
{
private Dictionary<string, ShoppingCart> _carts = new Dictionary<string, ShoppingCart>();
public FakeCartStorage(ShoppingCart cart = null, string cartId = null)
{
_carts[cartId ?? ""] = cart != null
? new ShoppingCart(cart.Items)
: new ShoppingCart();
}
public string GetUniqueCartId(string shoppingCartId)
=> Guid.NewGuid().ToString();
public Task<ShoppingCart> Retrieve(string shoppingCartId = null)
{
if (!_carts.TryGetValue(shoppingCartId ?? "", out var cart))
{
cart = new ShoppingCart();
_carts.Add(shoppingCartId ?? "", cart);
}
return Task.FromResult(cart);
}
public Task Store(ShoppingCart cart, string shoppingCartId = null)
{
_carts[shoppingCartId ?? ""] = new ShoppingCart(cart.Items);
return Task.CompletedTask;
}
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/IProductAttributeValue.cs<|end_filename|>
using System;
using System.Globalization;
namespace OrchardCore.Commerce.Abstractions
{
public interface IProductAttributeValue
{
string AttributeName { get; }
object UntypedValue { get; }
string Display(CultureInfo culture = null);
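// AttributeName is expected to follow a "PartName.FieldName" convention,
// e.g. "ProductPart3.attr1" gives PartName "ProductPart3" and Label "attr1".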
public string Label
{
get
{
string[] splitName = AttributeName.Split('.');
if (splitName.Length < 2) return AttributeName;
return splitName[1];
}
}
public string PartName
{
get
{
string[] splitName = AttributeName.Split('.');
if (splitName.Length < 2) return null;
return splitName[0];
}
}
}
public interface IProductAttributeValue<T> : IProductAttributeValue, IEquatable<IProductAttributeValue<T>>
{
T Value { get; }
}
}
<|start_filename|>OrchardCore.Commerce/Settings/AddressFieldSettingsDriver.cs<|end_filename|>
using System.Threading.Tasks;
using OrchardCore.Commerce.Fields;
using OrchardCore.ContentManagement.Metadata.Models;
using OrchardCore.ContentTypes.Editors;
using OrchardCore.DisplayManagement.Views;
namespace OrchardCore.Commerce.Settings
{
public class AddressFieldSettingsDriver : ContentPartFieldDefinitionDisplayDriver<AddressField>
{
public override IDisplayResult Edit(ContentPartFieldDefinition partFieldDefinition)
=> Initialize<AddressPartFieldSettings>("AddressFieldSettings_Edit", model => partFieldDefinition.PopulateSettings(model))
.Location("Content");
public override async Task<IDisplayResult> UpdateAsync(ContentPartFieldDefinition partFieldDefinition, UpdatePartFieldEditorContext context)
{
var model = new AddressPartFieldSettings();
await context.Updater.TryUpdateModelAsync(model, Prefix);
context.Builder.WithSettings(model);
return Edit(partFieldDefinition);
}
}
}
<|start_filename|>OrchardCore.Commerce/Activities/ProductAddedToCartEvent.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Localization;
using OrchardCore.Workflows.Abstractions.Models;
using OrchardCore.Workflows.Activities;
using OrchardCore.Workflows.Models;
namespace OrchardCore.Commerce.Activities
{
public class ProductAddedToCartEvent : EventActivity
{
private readonly IStringLocalizer<ProductAddedToCartEvent> S;
public ProductAddedToCartEvent(IStringLocalizer<ProductAddedToCartEvent> localizer)
{
S = localizer;
}
public override string Name => nameof(ProductAddedToCartEvent);
public override LocalizedString DisplayText => S["Product added to cart"];
public override LocalizedString Category => S["Commerce"];
public override IEnumerable<Outcome> GetPossibleOutcomes(WorkflowExecutionContext workflowContext, ActivityContext activityContext)
=> Outcomes(S["Done"]);
}
}
<|start_filename|>OrchardCore.Commerce/Settings/AddressPartFieldSettings.cs<|end_filename|>
namespace OrchardCore.Commerce.Settings
{
public class AddressPartFieldSettings
{
public string Hint { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/ViewModels/PricePartSettingsViewModel.cs<|end_filename|>
using System.Collections.Generic;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Microsoft.AspNetCore.Mvc.Rendering;
using OrchardCore.Commerce.Settings;
namespace OrchardCore.Commerce.ViewModels
{
public class PricePartSettingsViewModel
{
public CurrencySelectionModeEnum CurrencySelectionMode { get; set; }
public string SpecificCurrencyIsoCode { get; set; }
public IEnumerable<SelectListItem> CurrencySelectionModes { get; set; }
public IEnumerable<SelectListItem> Currencies { get; set; }
public CurrencySelectionModeEnum SingleSelectionModeEditor => CurrencySelectionModeEnum.SpecificCurrency;
[BindNever]
public PricePartSettings PricePartSettings { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/ViewModels/EditProductAttributeFieldViewModel.cs<|end_filename|>
using OrchardCore.Commerce.Fields;
using OrchardCore.Commerce.Settings;
using OrchardCore.ContentManagement;
using OrchardCore.ContentManagement.Metadata.Models;
namespace OrchardCore.Commerce.ViewModels
{
public class EditProductAttributeFieldViewModel<TField, TFieldSettings>
where TField : ProductAttributeField
where TFieldSettings : ProductAttributeFieldSettings
{
public TField Field { get; set; }
public TFieldSettings Settings { get; set; }
public ContentPart Part { get; set; }
public ContentPartFieldDefinition PartFieldDefinition { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Fields/AddressField.cs<|end_filename|>
using InternationalAddress;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.Fields
{
public class AddressField : ContentField
{
public Address Address { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Services/NullCurrencySelector.cs<|end_filename|>
using Money.Abstractions;
using OrchardCore.Commerce.Abstractions;
namespace OrchardCore.Commerce.Services
{
public class NullCurrencySelector : ICurrencySelector
{
public ICurrency CurrentDisplayCurrency => null;
}
}
<|start_filename|>OrchardCore.Commerce.Tests/SerializationTests.cs<|end_filename|>
using Money;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.Commerce.ProductAttributeValues;
using OrchardCore.Commerce.Services;
using OrchardCore.Commerce.Tests.Fakes;
using System.Threading.Tasks;
using Xunit;
namespace OrchardCore.Commerce.Tests
{
public class SerializationTests
{
[Fact]
public async Task ShoppingCartSerializesAndDeserializes()
{
var cart = new ShoppingCart(
new ShoppingCartItem(2, "product-1", prices: new[]
{
new PrioritizedPrice(0, new Amount(10, Currency.Euro)),
new PrioritizedPrice(1, new Amount(7, Currency.USDollar))
}),
new ShoppingCartItem(1, "product-2", attributes: new IProductAttributeValue[]
{
new BooleanProductAttributeValue("ProductPart3.attr1", true),
new NumericProductAttributeValue("ProductPart3.attr3", (decimal?)42.0)
}, prices: new[]
{
new PrioritizedPrice(0, new Amount(12, Currency.USDollar))
}));
var helpers = new ShoppingCartHelpers(
attributeProviders: new[] { new ProductAttributeProvider() },
productService: new FakeProductService(),
moneyService: new TestMoneyService(),
contentDefinitionManager: new FakeContentDefinitionManager());
string serialized = await helpers.Serialize(cart);
ShoppingCart deserialized = await helpers.Deserialize(serialized);
Assert.Equal(cart.Count, deserialized.Count);
Assert.Equal(cart.ItemCount, deserialized.ItemCount);
Assert.Equal(cart.Items, deserialized.Items);
}
}
}
<|start_filename|>OrchardCore.Commerce/Serialization/JsonElementSerializer.cs<|end_filename|>
using System.Text.Json;
namespace OrchardCore.Commerce.Serialization
{
public static class JsonElementSerializer
{
/// <summary>
/// Transforms a JSON element into the required type.
/// </summary>
/// <typeparam name="T">The type to deserialize into.</typeparam>
/// <param name="jsonElement">The element.</param>
/// <returns>The deserialized instance of T.</returns>
/// <remarks>This will no longer be necessary when the BCL supports it natively.</remarks>
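/// <example>Illustrative usage: <code>var amount = jsonElement.ToObject&lt;Amount&gt;();</code></example>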
public static T ToObject<T>(this JsonElement jsonElement)
{
string elementText = jsonElement.GetRawText();
return JsonSerializer.Deserialize<T>(elementText);
}
}
}
<|start_filename|>OrchardCore.Commerce/Models/ProductPart.cs<|end_filename|>
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.Models
{
/// <summary>
/// The product part describes the most basic product attribute: a SKU.
/// It also identifies any content item as a product, by its mere presence.
/// </summary>
public class ProductPart : ContentPart
{
/// <summary>
/// The product's SKU, which can also be used as an alias for the item.
/// </summary>
public string Sku { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Settings/CurrencySelectionModeEnum.cs<|end_filename|>
using System;
namespace OrchardCore.Commerce.Settings
{
public enum CurrencySelectionModeEnum
{
AllCurrencies,
DefaultCurrency,
SpecificCurrency
}
}
<|start_filename|>OrchardCore.Commerce/Models/OrderPart.cs<|end_filename|>
using System.Collections.Generic;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.Models
{
public class OrderPart : ContentPart
{
/// <summary>
/// The line items in this order.
/// </summary>
public IList<OrderLineItem> LineItems { get; set; }
/// <summary>
/// Additional costs such as taxes and shipping.
/// </summary>
public IList<OrderAdditionalCost> AdditionalCosts { get; set; }
/// <summary>
/// Amounts charged on this order. Typically a single credit card charge.
/// </summary>
public IList<IPayment> Charges { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Handlers/PriceVariantsPartHandler.cs<|end_filename|>
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.ContentManagement.Handlers;
namespace OrchardCore.Commerce.Handlers
{
public class PriceVariantsPartHandler : ContentPartHandler<PriceVariantsPart>
{
private IMoneyService _moneyService;
public PriceVariantsPartHandler(IMoneyService moneyService)
{
_moneyService = moneyService;
}
public override Task LoadingAsync(LoadContentContext context, PriceVariantsPart part)
{
if (part.Variants != null)
{
foreach (var variantKey in part.Variants.Keys)
{
part.Variants[variantKey] = _moneyService.EnsureCurrency(part.Variants[variantKey]);
}
}
return base.LoadingAsync(context, part);
}
}
}
<|start_filename|>OrchardCore.Commerce/Views/ShoppingCart/Index.cshtml<|end_filename|>
@model ShoppingCartViewModel
@using OrchardCore.Commerce.Abstractions
@using Money
<zone name="Header">
<header class="masthead">
<div class="container">
<div class="intro-text">
<div class="intro-lead-in"></div>
<div class="intro-heading text-uppercase">@T["Your cart"]</div>
</div>
</div>
</header>
</zone>
<section id="shopping-cart">
@if (Model.Lines != null && Model.Lines.Any())
{
<div class="container">
<div class="d-none d-sm-flex row border-bottom mt-3 pb-2 font-weight-bold">
<div class="col-xs-12 col-sm-1">@T["Quantity"]</div>
<div class="col-xs-12 col-sm-9">@T["Product"]</div>
<div class="col-xs-12 col-sm-1">@T["Price"]</div>
<div class="col-xs-12 col-sm-1">@T["Action"]</div>
</div>
@{
int line = 0;
}
@foreach (ShoppingCartLineViewModel item in Model.Lines)
{
<div class="row border-bottom mt-3 pb-2">
<div class="col-xs-12 col-sm-1">
@{
int i = 0;
var name = $"cart.lines[{line++}]";
}
<input form="shopping-cart-update" type="number" min="0" name="@(name).Quantity" value="@item.Quantity" class="form-control" />
<input form="shopping-cart-update" type="hidden" name="@(name).ProductSku" value="@item.ProductSku" />
@foreach (IProductAttributeValue attr in item.Attributes.Select(attr => attr.Value).Where(val => val.UntypedValue != null))
{
<shape Type="@attr.GetType().Name" alternate="@(attr.GetType().Name)_hidden" form="shopping-cart-update" prop-index="@(i++)" prop-attributeValue="@attr" prop-namePrefix="@name" />
}
</div>
<div class="col-xs-12 col-sm-9">
<a href="@item.ProductUrl">@item.ProductName</a>
@if (item.Attributes != null)
{
<ul class="cart-product-attributes list-inline">
@foreach (IProductAttributeValue attr in item.Attributes.Select(attr => attr.Value).Where(val => val.UntypedValue != null))
{
<shape tag="li" Type="@attr.GetType().Name" prop-attribute="@attr" prop-label="@attr.Label" />
}
</ul>
}
</div>
<div class="col-xs-12 col-sm-1">@item.UnitPrice</div>
<div class="col-xs-12 col-sm-1">
@{
i = 0;
}
<form asp-action="RemoveItem" asp-controller="ShoppingCart" asp-area="OrchardCore.Commerce">
<input type="hidden" name="line.ProductSku" value="@item.ProductSku" />
@foreach (IProductAttributeValue attr in item.Attributes.Select(attr => attr.Value).Where(val => val.UntypedValue != null))
{
<shape Type="@attr.GetType().Name" alternate="@(attr.GetType().Name)_hidden" prop-index="@(i++)" prop-attributeValue="@attr" />
}
<button type="submit" class="btn btn-primary">@T["Remove"]</button>
</form>
</div>
</div>
}
<div class="row mt-3 mb-3 pb-2">
<div class="col-xs-12 col-sm-10 font-weight-bold">@T["Total"]</div>
<div class="col-xs-12 col-sm-1">
@foreach (Amount total in Model.Totals)
{
<div>@total</div>
}
</div>
<div class="col-xs-12 col-sm-1">
<form asp-action="Update" asp-controller="ShoppingCart" asp-area="OrchardCore.Commerce" id="shopping-cart-update">
<button type="submit" class="btn btn-primary">@T["Update"]</button>
</form>
</div>
</div>
</div>
}
else
{
<p>@T["Your shopping cart is empty."]</p>
}
</section>
<|start_filename|>OrchardCore.Commerce/Abstractions/IProductService.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Abstractions
{
public interface IProductService
{
Task<ProductPart> GetProduct(string sku);
Task<IEnumerable<ProductPart>> GetProducts(IEnumerable<string> skus);
async Task<IDictionary<string, ProductPart>> GetProductDictionary(IEnumerable<string> skus)
=> (await GetProducts(skus)).ToDictionary(product => product.Sku);
}
}
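// Consumption sketch for the default interface method above (hypothetical caller):
//   IDictionary<string, ProductPart> productsBySku = await productService.GetProductDictionary(skus);
//   if (productsBySku.TryGetValue("SKU-1", out var product)) { /* use product.Sku, product.ContentItem */ }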
<|start_filename|>OrchardCore.Commerce/ProductAttributeValues/NumericProductAttributeValue.cs<|end_filename|>
namespace OrchardCore.Commerce.ProductAttributeValues
{
public class NumericProductAttributeValue : BaseProductAttributeValue<decimal?>
{
public NumericProductAttributeValue(string attributeName, decimal? value)
: base(attributeName, value) { }
}
}
<|start_filename|>MoneyDataType/CurrencyConverter.cs<|end_filename|>
using System;
using System.Text.Json;
using System.Text.Json.Serialization;
using Money.Abstractions;
namespace Money.Serialization
{
internal class CurrencyConverter : JsonConverter<ICurrency>
{
public override ICurrency Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
=> Currency.FromISOCode(reader.GetString());
public override void Write(Utf8JsonWriter writer, ICurrency value, JsonSerializerOptions options)
=> writer.WriteStringValue(value.CurrencyIsoCode);
}
internal class LegacyCurrencyConverter : Newtonsoft.Json.JsonConverter<ICurrency>
{
public override ICurrency ReadJson(Newtonsoft.Json.JsonReader reader, Type objectType, ICurrency existingValue, bool hasExistingValue, Newtonsoft.Json.JsonSerializer serializer)
=> Currency.FromISOCode((string)reader.Value); // the reader is already positioned on the string value; ReadAsString() would advance past it
public override void WriteJson(Newtonsoft.Json.JsonWriter writer, ICurrency value, Newtonsoft.Json.JsonSerializer serializer)
=> writer.WriteValue(value.CurrencyIsoCode);
}
}
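// Wiring sketch for the System.Text.Json converter above (hypothetical caller
// within this assembly, since the converter types are internal):
//   var options = new JsonSerializerOptions();
//   options.Converters.Add(new CurrencyConverter());
//   ICurrency currency = JsonSerializer.Deserialize<ICurrency>("\"EUR\"", options);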
<|start_filename|>OrchardCore.Commerce/Abstractions/ICurrencySelector.cs<|end_filename|>
using Money.Abstractions;
namespace OrchardCore.Commerce.Abstractions
{
/// <summary>
/// Implementations of this interface can alter the currency used for showing prices to the customer.
/// </summary>
public interface ICurrencySelector
{
/// <summary>
/// The current currency used for displaying prices to the customer.
/// </summary>
public ICurrency CurrentDisplayCurrency { get; }
}
}
<|start_filename|>OrchardCore.Commerce/Models/PrioritizedPrice.cs<|end_filename|>
using System.Diagnostics;
using System.Text.Json.Serialization;
using Money;
using OrchardCore.Commerce.Serialization;
namespace OrchardCore.Commerce.Models
{
/// <summary>
/// A price and its priority.
/// </summary>
[JsonConverter(typeof(PrioritizedPriceConverter))]
[DebuggerDisplay("{DebuggerDisplay,nq}")]
public class PrioritizedPrice
{
/// <summary>
/// The priority for the price (higher takes precedence).
/// </summary>
public int Priority { get; }
/// <summary>
/// The price.
/// </summary>
public Amount Price { get; }
/// <summary>
/// Builds a new prioritized price from an amount and a priority.
/// </summary>
/// <param name="priority">The piority.</param>
/// <param name="price">The price.</param>
public PrioritizedPrice(int priority, Amount price)
{
Priority = priority;
Price = price;
}
private string DebuggerDisplay => $"{Price} ^{Priority}";
}
}
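// Resolution sketch, assuming the convention documented above that the higher
// priority wins (hypothetical caller code; requires System.Linq):
//   Amount effectivePrice = prioritizedPrices.OrderByDescending(p => p.Priority).First().Price;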
<|start_filename|>OrchardCore.Commerce.Tests/Fakes/AnkhMorporkCurrencyProvider.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using Money;
using Money.Abstractions;
namespace OrchardCore.Commerce.Tests.Fakes
{
public class AnkhMorporkCurrencyProvider : ICurrencyProvider
{
public static readonly ICurrency AnkhMorporkDollar
= new Currency("Ankh-Morpork Dollar", "Ankh-Morpork Dollar", "$AM", "AMD");
public static readonly ICurrency SixPence
= new Currency("Sixpence", "Sixpence", "6p", "SXP");
private readonly ICurrency[] _currencies = new[] {
AnkhMorporkDollar,
SixPence
};
public IEnumerable<ICurrency> Currencies => _currencies;
public ICurrency GetCurrency(string isoCode)
=> _currencies.FirstOrDefault(c => c.CurrencyIsoCode == isoCode);
public bool IsKnownCurrency(string isoCode) => _currencies.Any(c => string.Equals(c.CurrencyIsoCode, isoCode, System.StringComparison.InvariantCultureIgnoreCase));
}
}
<|start_filename|>OrchardCore.Commerce/Drivers/AddressFieldDisplayDriver.cs<|end_filename|>
using System.Threading.Tasks;
using InternationalAddress;
using Microsoft.AspNetCore.Html;
using OrchardCore.Commerce.Fields;
using OrchardCore.Commerce.ViewModels;
using OrchardCore.ContentManagement.Display.ContentDisplay;
using OrchardCore.ContentManagement.Display.Models;
using OrchardCore.DisplayManagement.Views;
namespace OrchardCore.Commerce.Drivers
{
public class AddressFieldDisplayDriver : ContentFieldDisplayDriver<AddressField>
{
private readonly IAddressFormatterProvider _addressFormatterProvider;
public AddressFieldDisplayDriver(IAddressFormatterProvider addressFormatterProvider)
{
_addressFormatterProvider = addressFormatterProvider;
}
public override IDisplayResult Edit(AddressField addressField, BuildFieldEditorContext context)
{
return Initialize<AddressFieldViewModel>(GetEditorShapeType(context), m => BuildViewModel(m, addressField, context));
}
private Task BuildViewModel(AddressFieldViewModel model, AddressField field, BuildFieldEditorContext context)
{
model.Address = field.Address;
model.AddressHtml
= new HtmlString(_addressFormatterProvider.Format(field.Address).Replace(System.Environment.NewLine, "<br/>"));
model.Regions = Regions.All;
model.Provinces = Regions.Provinces;
model.ContentItem = field.ContentItem;
model.AddressPart = field;
model.PartFieldDefinition = context.PartFieldDefinition;
return Task.CompletedTask;
}
}
}
<|start_filename|>OrchardCore.Commerce/Settings/CommerceSettingsCurrencySelector.cs<|end_filename|>
using Microsoft.Extensions.Options;
using Money;
using Money.Abstractions;
using OrchardCore.Commerce.Abstractions;
namespace OrchardCore.Commerce.Settings
{
public class CommerceSettingsCurrencySelector : ICurrencySelector
{
private readonly CommerceSettings _options;
public CommerceSettingsCurrencySelector(IOptions<CommerceSettings> options)
{
_options = options.Value;
}
public ICurrency CurrentDisplayCurrency => Currency.FromISOCode(_options.CurrentDisplayCurrency);
}
}
<|start_filename|>OrchardCore.Commerce.Tests/Fakes/FakeProductService.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Tests.Fakes
{
public class FakeProductService : IProductService
{
public Task<ProductPart> GetProduct(string sku)
=> Task.FromResult(new ProductPart
{
Sku = sku,
ContentItem = new ContentManagement.ContentItem { ContentType = "Product" }
});
public Task<IEnumerable<ProductPart>> GetProducts(IEnumerable<string> skus)
=> Task.FromResult(skus.Select(sku => GetProduct(sku).Result));
}
}
<|start_filename|>OrchardCore.Commerce/Services/ProductService.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Indexes;
using OrchardCore.Commerce.Models;
using OrchardCore.ContentManagement;
using YesSql;
using YesSql.Services;
namespace OrchardCore.Commerce.Services
{
public class ProductService : IProductService
{
private ISession _session;
private IContentManager _contentManager;
public ProductService(
ISession session,
IContentManager contentManager)
{
_session = session;
_contentManager = contentManager;
}
public async Task<ProductPart> GetProduct(string sku)
{
var contentItemId = (await _session.QueryIndex<ProductPartIndex>(x => x.Sku == sku).FirstOrDefaultAsync())?.ContentItemId;
return contentItemId is null ? null : (await _contentManager.GetAsync(contentItemId)).As<ProductPart>();
}
public async Task<IEnumerable<ProductPart>> GetProducts(IEnumerable<string> skus)
{
var contentItemIds = (await _session
.QueryIndex<ProductPartIndex>(x => x.Sku.IsIn(skus))
.ListAsync())
.Select(idx => idx.ContentItemId)
.Distinct()
.ToArray();
return (await _contentManager.GetAsync(contentItemIds))
.Select(item => item.As<ProductPart>());
}
}
}
<|start_filename|>OrchardCore.Commerce/Drivers/ProductAddedToCartEventDisplay.cs<|end_filename|>
using OrchardCore.Commerce.Activities;
using OrchardCore.Workflows.Display;
namespace OrchardCore.Commerce.Drivers
{
public class ProductAddedToCartEventDisplay : ActivityDisplayDriver<ProductAddedToCartEvent>
{
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/IPredefinedValuesProductAttributeService.cs<|end_filename|>
using System.Collections.Generic;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.Abstractions
{
public interface IPredefinedValuesProductAttributeService
{
IEnumerable<ProductAttributeDescription> GetProductAttributesRestrictedToPredefinedValues(ContentItem product);
IEnumerable<IEnumerable<object>> GetProductAttributesPredefinedValues(ContentItem product);
IEnumerable<string> GetProductAttributesCombinations(ContentItem product);
}
}
<|start_filename|>OrchardCore.Commerce/Drivers/ProductPartDisplayDriver.cs<|end_filename|>
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.Commerce.ViewModels;
using OrchardCore.ContentManagement.Display.ContentDisplay;
using OrchardCore.ContentManagement.Display.Models;
using OrchardCore.DisplayManagement.ModelBinding;
using OrchardCore.DisplayManagement.Views;
namespace OrchardCore.Commerce.Drivers
{
public class ProductPartDisplayDriver : ContentPartDisplayDriver<ProductPart>
{
private readonly IProductAttributeService _productAttributeService;
public ProductPartDisplayDriver(IProductAttributeService productAttributeService)
{
_productAttributeService = productAttributeService;
}
public override IDisplayResult Display(ProductPart productPart, BuildPartDisplayContext context)
{
return Initialize<ProductPartViewModel>(GetDisplayShapeType(context), m => BuildViewModel(m, productPart))
.Location("Detail", "Content:20")
.Location("Summary", "Meta:5");
}
public override IDisplayResult Edit(ProductPart productPart, BuildPartEditorContext context)
{
return Initialize<ProductPartViewModel>(GetEditorShapeType(context), m => BuildViewModel(m, productPart));
}
public override async Task<IDisplayResult> UpdateAsync(ProductPart model, IUpdateModel updater, UpdatePartEditorContext context)
{
await updater.TryUpdateModelAsync(model, Prefix, t => t.Sku);
return Edit(model, context);
}
private Task BuildViewModel(ProductPartViewModel model, ProductPart part)
{
model.ContentItem = part.ContentItem;
model.Sku = part.Sku;
model.ProductPart = part;
model.Attributes = _productAttributeService.GetProductAttributeFields(part.ContentItem);
// TODO: filter out of inventory products here as well when we have inventory management
// model.CanBeBought = ...;
return Task.CompletedTask;
}
}
}
<|start_filename|>OrchardCore.Commerce/ViewModels/ShoppingCartLineUpdateModel.cs<|end_filename|>
using System.Collections.Generic;
namespace OrchardCore.Commerce.ViewModels
{
public class ShoppingCartLineUpdateModel
{
public int Quantity { get; set; }
public string ProductSku { get; set; }
public IDictionary<string, string[]> Attributes { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/Services/PriceProvider.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.Services
{
/// <summary>
/// A simple price provider that obtains a price from a product by looking for a `PricePart`.
/// </summary>
public class PriceProvider : IPriceProvider
{
private readonly IProductService _productService;
private readonly IMoneyService _moneyService;
public PriceProvider(
IProductService productService,
IMoneyService moneyService)
{
_productService = productService;
_moneyService = moneyService;
}
public int Order => 0;
public async Task<IEnumerable<ShoppingCartItem>> AddPrices(IEnumerable<ShoppingCartItem> items)
{
var skus = items.Select(item => item.ProductSku).Distinct().ToArray();
var skuProducts = (await _productService.GetProducts(skus))
.ToDictionary(p => p.Sku);
return items
.Select(item =>
{
if (skuProducts.TryGetValue(item.ProductSku, out var product))
{
var newPrices = product
.ContentItem
.OfType<PricePart>()
.Where(pricePart => pricePart.Price.Currency == _moneyService.CurrentDisplayCurrency)
.Select(pricePart => new PrioritizedPrice(0, pricePart.Price));
return item.WithPrices(newPrices);
}
else
{
return item;
}
});
}
}
}
<|start_filename|>OrchardCore.Commerce.Tests/Fakes/FakePriceService.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Money;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
namespace OrchardCore.Commerce.Tests.Fakes
{
public class FakePriceService : IPriceService
{
public Task<IEnumerable<ShoppingCartItem>> AddPrices(IEnumerable<ShoppingCartItem> items)
=> Task.FromResult(
items.Select(
(item, i) => item.WithPrice(new PrioritizedPrice(0, new Amount(42 + i, Currency.USDollar)))));
}
}
<|start_filename|>OrchardCore.Commerce/Services/PriceVariantProvider.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using OrchardCore.Commerce.Abstractions;
using OrchardCore.Commerce.Models;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.Services
{
/// <summary>
/// A simple price provider that obtains a price from a product by looking for a `PriceVariantsPart`.
/// </summary>
public class PriceVariantProvider : IPriceProvider
{
private readonly IProductService _productService;
private readonly IPredefinedValuesProductAttributeService _predefinedValuesService;
public PriceVariantProvider(IProductService productService, IPredefinedValuesProductAttributeService predefinedValuesService)
{
_productService = productService;
_predefinedValuesService = predefinedValuesService;
}
public int Order => 1;
public async Task<IEnumerable<ShoppingCartItem>> AddPrices(IEnumerable<ShoppingCartItem> items)
{
var skus = items.Select(item => item.ProductSku).Distinct().ToArray();
var skuProducts = (await _productService.GetProducts(skus))
.ToDictionary(p => p.Sku);
return items
.Select(item =>
{
if (skuProducts.TryGetValue(item.ProductSku, out var product))
{
var priceVariantsPart = product.ContentItem.As<PriceVariantsPart>();
if (priceVariantsPart != null && priceVariantsPart.Variants != null)
{
var attributesRestrictedToPredefinedValues = _predefinedValuesService
.GetProductAttributesRestrictedToPredefinedValues(product.ContentItem)
.Select(attr => attr.PartName + "." + attr.Name)
.ToHashSet();
var predefinedAttributes = item.Attributes
.OfType<IPredefinedValuesProductAttributeValue>()
.Where(attribute => attributesRestrictedToPredefinedValues.Contains(attribute.AttributeName))
.OrderBy(x => x.AttributeName);
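// The variant lookup key is built by ordering the predefined attributes
// by name and dash-joining their values; e.g. (illustratively) Color = "Blue"
// and Size = "Medium" would produce the key "Blue-Medium".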
var variantKey = String.Join(
"-",
predefinedAttributes
.Select(attr => attr.UntypedPredefinedValue)
.Where(value => value != null));
if (priceVariantsPart.Variants.ContainsKey(variantKey))
{
return item.WithPrice(new PrioritizedPrice(1, priceVariantsPart.Variants[variantKey]));
}
}
}
return item;
});
}
}
}
<|start_filename|>OrchardCore.Commerce/Abstractions/IProductAttributeService.cs<|end_filename|>
using System.Collections.Generic;
using OrchardCore.ContentManagement;
namespace OrchardCore.Commerce.Abstractions
{
public interface IProductAttributeService
{
IEnumerable<ProductAttributeDescription> GetProductAttributeFields(ContentItem product);
}
}
<|start_filename|>AddressDataType/Abstractions/IAddressFormatter.cs<|end_filename|>
using System;
namespace InternationalAddress
{
/// <summary>
/// Formats an address.
/// </summary>
public interface IAddressFormatter
{
string Format(Address address)
=> address is null ? "-" : (address.Name
+ (String.IsNullOrWhiteSpace(address.Department) ? "" : Environment.NewLine + address.Department)
+ (String.IsNullOrWhiteSpace(address.Company) ? "" : Environment.NewLine + address.Company)
+ Environment.NewLine + address.StreetAddress1
+ (String.IsNullOrWhiteSpace(address.StreetAddress2) ? "" : Environment.NewLine + address.StreetAddress2)
+ Environment.NewLine + address.City
+ (String.IsNullOrWhiteSpace(address.Province) ? "" : " " + address.Province)
+ " " + address.PostalCode
+ (String.IsNullOrWhiteSpace(address.Region) ? "" : Environment.NewLine + address.Region)
).ToUpper();
}
}
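// Usage sketch for the default Format above (hypothetical values; assumes
// Address exposes settable properties matching the names referenced in Format):
//   IAddressFormatter formatter = new DefaultAddressFormatter(); // any implementing type inherits Format
//   var label = formatter.Format(new Address { Name = "Jane Doe", StreetAddress1 = "1 Main St", City = "Springfield", PostalCode = "12345" });
// Optional lines (department, company, second street line, province, region)
// are skipped when null or whitespace, and the whole label is upper-cased.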
<|start_filename|>OrchardCore.Commerce/ViewModels/ShoppingCartLineViewModel.cs<|end_filename|>
using System.Collections.Generic;
using Money;
using OrchardCore.Commerce.Abstractions;
namespace OrchardCore.Commerce.ViewModels
{
public class ShoppingCartLineViewModel
{
public int Quantity { get; set; }
public string ProductSku { get; set; }
public string ProductName { get; set; }
public string ProductUrl { get; set; }
public string ProductImageUrl { get; set; }
public Amount UnitPrice { get; set; }
public Amount LinePrice { get; set; }
public IDictionary<string, IProductAttributeValue> Attributes { get; set; }
}
}
<|start_filename|>OrchardCore.Commerce/ViewModels/AddressFieldViewModel.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using InternationalAddress;
using Microsoft.AspNetCore.Html;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using OrchardCore.Commerce.Fields;
using OrchardCore.ContentManagement;
using OrchardCore.ContentManagement.Metadata.Models;
namespace OrchardCore.Commerce.ViewModels
{
public class AddressFieldViewModel
{
public Address Address { get; set; }
[BindNever]
public HtmlString AddressHtml { get; set; }
[BindNever]
public IList<RegionInfo> Regions { get; set; }
[BindNever]
public IDictionary<string, IDictionary<string, string>> Provinces { get; set; }
[BindNever]
public ContentItem ContentItem { get; set; }
[BindNever]
public AddressField AddressPart { get; set; }
[BindNever]
public ContentPartFieldDefinition PartFieldDefinition { get; set; }
}
}
| microposmp/OrchardCore.Commerce |
<|start_filename|>dist/pvpc-hourly-pricing-card.js<|end_filename|>
const LitElement =
window.LitElement ||
Object.getPrototypeOf(customElements.get('ha-panel-lovelace') || customElements.get('hc-lovelace'));
const { html, css } = LitElement.prototype;
const locale = {
ca: {
minPrice: 'Preu mínim avui:',
maxPrice: 'Preu màxim avui:',
minPriceNextDay: 'Preu mínim demà:',
maxPriceNextDay: 'Preu màxim demà:',
infoNoNextDay: 'Les dades de demà encara no estan disponibles',
from: 'de',
to: 'a',
optionName: 'Nom (Opcional)',
optionEntity: 'Entitat (Necessari)',
optionShowCurrent: 'Mostrar Estat Actual',
optionShowDetails: 'Mostrar Detalls',
optionShowGraph: 'Mostrar Gràfic',
optionShowInfo: 'Mostrar Informació'
},
da: {
minPrice: 'Minimumspris i dag:',
maxPrice: 'Maksimal pris i dag:',
minPriceNextDay: 'Minimumspris i morgen:',
maxPriceNextDay: 'Maksimal pris i morgen:',
infoNoNextDay: 'Morgendagens data er endnu ikke tilgængelige',
from: 'fra',
to: 'til',
optionName: 'Navn (valgfrit)',
optionEntity: 'Enhed (påkrævet)',
optionShowCurrent: 'Vis nuværende status',
optionShowDetails: 'Vis detaljer',
optionShowGraph: 'Vis graf',
optionShowInfo: 'Vis information'
},
de: {
minPrice: 'Mindestpreis heute:',
maxPrice: 'Maximaler preis heute:',
minPriceNextDay: 'Mindestpreis morgen:',
maxPriceNextDay: 'Maximaler preis morgen:',
infoNoNextDay: 'Die Daten von morgen sind noch nicht verfügbar',
from: 'von',
to: 'bis',
optionName: 'Name (optional)',
optionEntity: 'Entity (Erforderlich)',
optionShowCurrent: 'Aktuellen Status anzeigen',
optionShowDetails: 'Details anzeigen',
optionShowGraph: 'Grafik anzeigen',
optionShowInfo: 'Informationen anzeigen'
},
en: {
minPrice: 'Lowest price today:',
maxPrice: 'Highest price today:',
minPriceNextDay: 'Lowest price tomorrow:',
maxPriceNextDay: 'Highest price tomorrow:',
infoNoNextDay: "Tomorrow's data is not yet available",
from: 'from',
to: 'to',
optionName: 'Name (Optional)',
optionEntity: 'Entity (Required)',
optionShowCurrent: 'Show Current State',
optionShowDetails: 'Show Details',
optionShowGraph: 'Show Graph',
optionShowInfo: 'Show Info'
},
es: {
minPrice: 'Precio mínimo hoy:',
maxPrice: 'Precio máximo hoy:',
minPriceNextDay: 'Precio mínimo mañana:',
maxPriceNextDay: 'Precio máximo mañana:',
infoNoNextDay: 'Los datos de mañana no están disponibles aún',
from: 'de',
to: 'a',
optionName: 'Nombre (Opcional)',
optionEntity: 'Entidad (Necesario)',
optionShowCurrent: 'Mostrar Estado Actual',
optionShowDetails: 'Mostrar Detalles',
optionShowGraph: 'Mostrar Gráfico',
optionShowInfo: 'Mostrar Información'
},
fr: {
minPrice: "Prix minimum aujourd'hui:",
maxPrice: "Prix maximum aujourd'hui:",
minPriceNextDay: 'Prix minimum demain:',
maxPriceNextDay: 'Prix maximum demain:',
infoNoNextDay: 'Les données de demain ne sont pas encore disponibles',
from: 'de',
to: 'à',
optionName: 'Nom (Facultatif)',
optionEntity: 'Entité (Requise)',
optionShowCurrent: "Afficher l'état actuel",
optionShowDetails: 'Afficher les détails',
optionShowGraph: 'Afficher le graphique',
optionShowInfo: 'Afficher les informations'
},
nl: {
minPrice: 'Minimumprijs vandaag:',
maxPrice: 'Maximumprijs vandaag:',
minPriceNextDay: 'Minimumprijs morgen:',
maxPriceNextDay: 'Maximumprijs morgen:',
infoNoNextDay: 'De gegevens van morgen zijn nog niet beschikbaar',
from: 'van',
to: 'tot',
optionName: 'Naam (optioneel)',
optionEntity: 'Entiteit (vereist)',
optionShowCurrent: 'Toon huidige status',
optionShowDetails: 'Details weergeven',
optionShowGraph: 'Grafiek weergeven',
optionShowInfo: 'Informatie weergeven'
},
ru: {
minPrice: 'Минимальная цена сегодня:',
maxPrice: 'Максимальная цена сегодня:',
minPriceNextDay: 'Минимальная цена завтра:',
maxPriceNextDay: 'Максимальная цена завтра:',
infoNoNextDay: 'Данные завтра еще не доступны',
from: 'С',
to: 'до',
optionName: 'Имя (необязательно)',
optionEntity: 'Entity (обязательно)',
optionShowCurrent: 'Показать текущий статус',
optionShowDetails: 'Показать детали',
optionShowGraph: 'Показать график',
optionShowInfo: 'Показать информацию'
},
sv: {
minPrice: 'Lägsta pris idag:',
maxPrice: 'Maxpris idag:',
minPriceNextDay: 'Lägsta pris imorgon:',
maxPriceNextDay: 'Maxpris i morgon:',
infoNoNextDay: 'Morgondagens data är ännu inte tillgängliga',
from: 'från',
to: 'till',
optionName: 'Namn (valfritt)',
optionEntity: 'Enhet (obligatoriskt)',
optionShowCurrent: 'Visa aktuell status',
optionShowDetails: 'Visa detaljer',
optionShowGraph: 'Visa graf',
optionShowInfo: 'Visa information'
}
};
const tariffPeriodIconColors = {
Error: '--error-color',
P3: '--success-color',
P2: '--warning-color',
P1: '--error-color'
};
const tariffPeriodIcons = {
Error:
'M 28.342306,10.429944 27.798557,32.995546 H 24.243272 L 23.657695,10.429944 Z M 28.133172,41.570057 H 23.86683 v -4.412736 h 4.266342 z',
P3: 'm 2.5238392,17.238401 a 25.003164,25.003164 0 0 0 -0.6133588,1.888945 h 8.6436716 l 15.49805,22.870055 15.121052,-22.870055 h 8.891749 A 25.003164,25.003164 0 0 0 49.436017,17.238401 H 40.038344 L 26.052202,38.327015 12.06606,17.238401 Z',
P2: 'M 31.032172,16.612305 20.999855,32.113255 15.66609,25.065424 H 0.97821381 a 25.017275,25.017275 0 0 0 -0.0332829,0.949884 25.017275,25.017275 0 0 0 0.0468985,0.940092 H 14.800215 l 6.199595,8.453119 10.03232,-15.502917 5.335714,7.049798 h 14.578421 a 25.017275,25.017275 0 0 0 0.03328,-0.940092 25.017275,25.017275 0 0 0 -0.0469,-0.949884 H 37.233737 Z',
P1: 'M 2.5238392,34.768609 A 25.003164,25.003164 0 0 1 1.9104804,32.879664 h 8.6436716 l 15.49805,-22.870055 15.121052,22.870055 h 8.891749 a 25.003164,25.003164 0 0 1 -0.628986,1.888945 H 40.038344 L 26.052202,13.679995 12.06606,34.768609 Z'
};
const fireEvent = (node, type, detail, options) => {
options = options || {};
detail = detail === null || detail === undefined ? {} : detail;
const event = new Event(type, {
bubbles: options.bubbles === undefined ? true : options.bubbles,
cancelable: Boolean(options.cancelable),
composed: options.composed === undefined ? true : options.composed
});
event.detail = detail;
node.dispatchEvent(event);
return event;
};
function hasConfigOrEntityChanged(element, changedProps) {
if (changedProps.has('_config')) {
return true;
}
const oldHass = changedProps.get('hass');
if (oldHass) {
return oldHass.states[element._config.entity] !== element.hass.states[element._config.entity];
}
return true;
}
class PVPCHourlyPricingCard extends LitElement {
static get properties() {
return {
_config: { type: Object },
hass: { type: Object }
};
}
static getConfigElement() {
return document.createElement('pvpc-hourly-pricing-card-editor');
}
static getStubConfig(hass, entities, entitiesFallback) {
const entity = Object.keys(hass.states).find((eid) =>
Object.keys(hass.states[eid].attributes).some((aid) => aid == 'min_price_at')
);
return { entity: entity };
}
setConfig(config) {
if (!config.entity) {
throw new Error('Please define a "Spain electricity hourly pricing (PVPC)" entity');
}
this._config = config;
this.setPVPCHourlyPricingObj();
}
setPVPCHourlyPricingObj() {
if (!this.hass) return;
this.pvpcHourlyPricingObj = this._config.entity in this.hass.states ? this.hass.states[this._config.entity] : null;
if (!this.pvpcHourlyPricingObj) return;
this.despiction = this.getDespiction(this.pvpcHourlyPricingObj.attributes);
}
shouldUpdate(changedProps) {
return hasConfigOrEntityChanged(this, changedProps);
}
updated(param) {
this.setPVPCHourlyPricingObj();
let chart = this.shadowRoot.getElementById('Chart');
if (chart) {
chart.type = this.ChartData.type;
chart.data = this.ChartData.data;
chart.options = this.ChartData.options;
}
}
render() {
if (!this._config || !this.hass) {
return html``;
}
this.setPVPCHourlyPricingObj();
this.numberElements = 0;
this.lang = this.hass.selectedLanguage || this.hass.language;
if (!this.pvpcHourlyPricingObj) {
return html`
<style>
.not-found {
flex: 1;
background-color: yellow;
padding: 8px;
}
</style>
<ha-card>
<div class="not-found">Entity not available: ${this._config.entity}</div>
</ha-card>
`;
}
return html`
<ha-card header="${this._config.name ? this._config.name : ''}">
${this._config.current !== false ? this.renderCurrent() : ''}
${this._config.details !== false ? this.renderDetails() : ''}
${this._config.graph !== false ? this.renderGraph() : ''}
${this._config.info !== false ? this.renderInfo() : ''}
</ha-card>
`;
}
renderCurrent() {
this.numberElements++;
const tariffPeriod = this.pvpcHourlyPricingObj.attributes.period || 'Error';
const style = getComputedStyle(document.body);
const iconColor = style.getPropertyValue(tariffPeriodIconColors[tariffPeriod]);
return html`
<div class="current tappable ${this.numberElements > 1 ? 'spacer' : ''}" @click="${this._handleClick}">
<svg class="period-icon" viewBox="0 0 52 52">
<circle fill="${iconColor}" r="25" cy="26" cx="26" />
<path fill="#f9f9f9" d="${tariffPeriodIcons[tariffPeriod]}" />
</svg>
<span class="currentPrice">${this.getFixedFloat(this.pvpcHourlyPricingObj.state)}</span>
<span class="currentPriceUnit"> ${this.pvpcHourlyPricingObj.attributes.unit_of_measurement}</span>
</div>
`;
}
renderDetails() {
if (!this.despiction) {
return html``;
}
const minPrice = this.getFixedFloat(this.despiction.minPrice);
const minPriceFrom = this.getTimeString(new Date().setHours(this.despiction.minIndex, 0));
const minPriceTo = this.getTimeString(new Date().setHours(this.despiction.minIndex + 1, 0));
const maxPrice = this.getFixedFloat(this.despiction.maxPrice);
const maxPriceFrom = this.getTimeString(new Date().setHours(this.despiction.maxIndex, 0));
const maxPriceTo = this.getTimeString(new Date().setHours(this.despiction.maxIndex + 1, 0));
const minPriceNextDay = this.getFixedFloat(this.despiction.minPriceNextDay);
const minPriceFromNextDay = this.getTimeString(new Date().setHours(this.despiction.minIndexNextDay, 0));
const minPriceToNextDay = this.getTimeString(new Date().setHours(this.despiction.minIndexNextDay + 1, 0));
const maxPriceNextDay = this.getFixedFloat(this.despiction.maxPriceNextDay);
const maxPriceFromNextDay = this.getTimeString(new Date().setHours(this.despiction.maxIndexNextDay, 0));
const maxPriceToNextDay = this.getTimeString(new Date().setHours(this.despiction.maxIndexNextDay + 1, 0));
this.numberElements++;
return html`
<ul class="details tappable ${this.numberElements > 1 ? 'spacer' : ''}" @click="${this._handleClick}">
<li>
<ha-icon icon="mdi:thumb-up-outline"></ha-icon>
${this.ll('minPrice')} ${minPrice}${this.pvpcHourlyPricingObj.attributes.unit_of_measurement}
${this.ll('from')} ${minPriceFrom} ${this.ll('to')} ${minPriceTo}
</li>
<li>
<ha-icon icon="mdi:thumb-down-outline"></ha-icon>
${this.ll('maxPrice')} ${maxPrice}${this.pvpcHourlyPricingObj.attributes.unit_of_measurement}
${this.ll('from')} ${maxPriceFrom} ${this.ll('to')} ${maxPriceTo}
</li>
${this.despiction.minPriceNextDay
? html` <li>
<ha-icon icon="mdi:thumb-up-outline"></ha-icon>
${this.ll('minPriceNextDay')}
${minPriceNextDay}${this.pvpcHourlyPricingObj.attributes.unit_of_measurement} ${this.ll('from')}
${minPriceFromNextDay} ${this.ll('to')} ${minPriceToNextDay}
</li>
<li>
<ha-icon icon="mdi:thumb-down-outline"></ha-icon>
${this.ll('maxPriceNextDay')}
${maxPriceNextDay}${this.pvpcHourlyPricingObj.attributes.unit_of_measurement} ${this.ll('from')}
${maxPriceFromNextDay} ${this.ll('to')} ${maxPriceToNextDay}
</li>`
: ''}
</ul>
`;
}
renderGraph() {
if (!this.despiction) {
return html``;
}
this.numberElements++;
this.drawChart();
return html`
<div class="clear ${this.numberElements > 1 ? 'spacer' : ''}">
<ha-chart-base id="Chart"></ha-chart-base>
</div>
`;
}
renderInfo() {
if (!this.despiction) {
return html``;
}
this.numberElements++;
if (!this.despiction.minPriceNextDay) {
return html`
<div class="info clear ${this.numberElements > 1 ? 'spacer' : ''}">${this.ll('infoNoNextDay')}</div>
`;
} else {
return html``;
}
}
drawChart() {
if (!this.despiction) return;
const that = this;
const style = getComputedStyle(document.body);
const selectionColor = style.getPropertyValue('--secondary-text-color');
const todayColor = '#377eb8';
const tomorrowColor = '#ff7f00';
const today = new Date();
const minIndex = this.despiction.minIndex;
const maxIndex = this.despiction.maxIndex;
const minIndexNextDay = this.despiction.minIndexNextDay;
const maxIndexNextDay = this.despiction.maxIndexNextDay;
const hasNextDayData = this.despiction.pricesNextDay[0] !== undefined;
const minIcon = '▼';
const maxIcon = '▲';
const chartOptions = {
type: 'line',
data: {
labels: this.despiction.dateTime,
datasets: [
{
label: that.getDateString(today),
data: this.despiction.prices,
pointRadius: 0,
borderColor: todayColor,
backgroundColor: todayColor + '7F',
fill: false,
stepped: 'before'
}
]
},
options: {
animation: {
duration: 0,
easing: 'linear',
onComplete: function (context) {
const chartInstance = context.chart;
const ctx = chartInstance.ctx;
const meta = chartInstance._metasets[0];
ctx.save();
const selectedIndex =
chartInstance._active && chartInstance._active.length > 0 && chartInstance._active[0].index < 24
? chartInstance._active[0].index
: today.getHours();
const yaxis = chartInstance.chartArea;
const xBarStart = meta.data[selectedIndex].x;
const xBarEnd = meta.data[selectedIndex + 1].x;
const yBarStart = yaxis.top;
const yBarEnd = yaxis.bottom;
ctx.globalAlpha = 0.3;
ctx.fillStyle = selectionColor;
ctx.fillRect(xBarStart, yBarStart, xBarEnd - xBarStart, yBarEnd - yBarStart);
ctx.restore();
ctx.textAlign = 'center';
ctx.textBaseline = 'middle';
const minBarStart = meta.data[minIndex];
const minBarEnd = meta.data[minIndex + 1];
const pointToPointCenterXOffset = (minBarEnd.x - minBarStart.x) / 2;
const maxBar = meta.data[maxIndex];
const iconYOffset = 8;
ctx.fillStyle = meta.dataset.options.borderColor;
ctx.fillText(minIcon, minBarStart.x + pointToPointCenterXOffset, minBarStart.y - iconYOffset);
ctx.fillText(maxIcon, maxBar.x + pointToPointCenterXOffset, maxBar.y - iconYOffset);
if (hasNextDayData) {
const meta_next_day = chartInstance._metasets[1];
const minNextDayBar = meta_next_day.data[minIndexNextDay];
const maxNextDayBar = meta_next_day.data[maxIndexNextDay];
ctx.fillStyle = meta_next_day.dataset.options.borderColor;
ctx.fillText(minIcon, minNextDayBar.x + pointToPointCenterXOffset, minNextDayBar.y - iconYOffset);
ctx.fillText(maxIcon, maxNextDayBar.x + pointToPointCenterXOffset, maxNextDayBar.y - iconYOffset);
}
}
},
scales: {
x: {
type: 'time',
adapters: {
date: {
locale: this.hass.locale
}
},
ticks: {
maxRotation: 0,
sampleSize: 5,
autoSkipPadding: 20
},
time: {
tooltipFormat: 'hours'
}
},
y: {
ticks: {
maxTicksLimit: 7
},
title: {
display: true,
text: that.pvpcHourlyPricingObj.attributes.unit_of_measurement
}
}
},
interaction: {
intersect: false,
mode: 'index'
},
plugins: {
tooltip: {
callbacks: {
title: function (tooltipItems, data) {
let index =
tooltipItems[0].dataIndex != 24 ? tooltipItems[0].dataIndex : (tooltipItems[0].dataIndex = 23);
let date = new Date(new Date().setHours(index, 0));
let initDate = that.getTimeString(date);
let endDate = that.getTimeString(date.setHours(date.getHours() + 1));
return initDate + ' - ' + endDate;
},
label: function (tooltipItem, data) {
let icon;
const index = tooltipItem.dataIndex != 24 ? tooltipItem.dataIndex : (tooltipItem.dataIndex = 23);
if (tooltipItem.datasetIndex === 0) {
if (index == minIndex) {
icon = minIcon;
} else if (index == maxIndex) {
icon = maxIcon;
}
} else if (tooltipItem.datasetIndex === 1) {
if (index == minIndexNextDay) {
icon = minIcon;
} else if (index == maxIndexNextDay) {
icon = maxIcon;
}
}
const labelTitle = tooltipItem.dataset.label || '';
const label =
labelTitle +
': ' +
parseFloat(tooltipItem.raw).toFixed(5) +
' ' +
that.pvpcHourlyPricingObj.attributes.unit_of_measurement +
' ';
return icon ? label + icon : label;
}
}
},
filler: {
propagate: true
},
legend: {
display: true,
labels: {
usePointStyle: true
}
}
},
elements: {
line: {
tension: 0.1,
borderWidth: 1.5
},
point: {
hitRadius: 0,
hoverRadius: 0
}
}
}
};
if (hasNextDayData) {
chartOptions.data.datasets.push({
label: that.getDateString(today.setDate(today.getDate() + 1)),
data: this.despiction.pricesNextDay,
pointRadius: 0,
borderColor: tomorrowColor,
backgroundColor: tomorrowColor + '7F',
fill: false,
stepped: 'before'
});
}
this.ChartData = chartOptions;
}
getDespiction(attributes) {
const today = new Date();
const priceRegex = /price_\d\dh/;
const priceNextDayRegex = /price_(next|last)_day_\d\dh/;
const priceArray = Object.keys(attributes)
.filter((key) => priceRegex.test(key))
.map((key) => attributes[key]);
const priceNextDayArray = Object.keys(attributes)
.filter((key) => priceNextDayRegex.test(key))
.map((key) => attributes[key]);
let data = [];
let dateTime = [];
let prices = [];
let pricesNextDay = [];
for (let index = 0; index < 24; index++) {
dateTime.push(new Date(today.setHours(index, 0)));
prices.push(priceArray[index]);
pricesNextDay.push(priceNextDayArray[index]);
}
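// Append a 25th data point that repeats hour 23's price so the stepped
// ('before') chart line extends the last hour all the way to midnight.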
dateTime.push(new Date(today.setHours(24, 0)));
prices.push(priceArray[23]);
pricesNextDay.push(priceNextDayArray[23]);
data.dateTime = dateTime;
data.prices = prices;
data.pricesNextDay = pricesNextDay;
data.minPrice = Math.min.apply(null, prices);
data.maxPrice = Math.max.apply(null, prices);
data.minIndex = prices.indexOf(data.minPrice);
data.maxIndex = prices.indexOf(data.maxPrice);
data.minPriceNextDay = Math.min.apply(null, pricesNextDay);
data.maxPriceNextDay = Math.max.apply(null, pricesNextDay);
data.minIndexNextDay = pricesNextDay.indexOf(data.minPriceNextDay);
data.maxIndexNextDay = pricesNextDay.indexOf(data.maxPriceNextDay);
return data;
}
getDateString(datetime) {
return new Date(datetime).toLocaleDateString(this.lang, {
day: '2-digit',
month: '2-digit',
year: 'numeric'
});
}
getTimeString(datetime) {
return new Date(datetime).toLocaleTimeString(this.lang, { hour: '2-digit', minute: '2-digit', hour12: false });
}
getFixedFloat(number) {
return parseFloat(number).toFixed(5);
}
_handleClick() {
fireEvent(this, 'hass-more-info', { entityId: this._config.entity });
}
getCardSize() {
return this.numberElements || 3;
}
static get styles() {
return css`
ha-card {
margin: auto;
padding-top: 1.3em;
padding-bottom: 1.3em;
padding-left: 1em;
padding-right: 1em;
position: relative;
}
ha-icon {
color: var(--paper-item-icon-color);
}
.spacer {
padding-top: 1em;
}
.clear {
clear: both;
}
.tappable {
cursor: pointer;
}
.current {
height: 5.5em;
position: relative;
display: flex;
align-items: center;
justify-content: space-between;
}
.period-icon {
padding-left: 16px;
padding-right: 16px;
width: 5.5em;
height: 5.5em;
}
.currentPrice {
font-weight: 300;
font-size: 4em;
color: var(--primary-text-color);
margin-top: 0.5em;
margin-right: 8px;
}
.currentPriceUnit {
font-weight: 300;
font-size: 1.5em;
vertical-align: super;
color: var(--primary-text-color);
right: 0em;
top: 0em;
position: absolute;
margin-right: 8px;
}
.details {
font-weight: 300;
color: var(--primary-text-color);
list-style: none;
padding-right: 1em;
padding-left: 1em;
}
.details li {
display: flex;
align-items: center;
justify-content: flex-start;
}
.details ha-icon {
height: 22px;
margin-right: 4px;
}
.info {
color: var(--primary-text-color);
text-align: center;
padding-right: 1em;
padding-left: 1em;
}
`;
}
ll(str) {
if (locale[this.lang] === undefined) return locale.en[str];
return locale[this.lang][str];
}
}
customElements.define('pvpc-hourly-pricing-card', PVPCHourlyPricingCard);
export class PVPCHourlyPricingCardEditor extends LitElement {
setConfig(config) {
this._config = { ...config };
}
static get properties() {
return { hass: {}, _config: {} };
}
get _entity() {
return this._config.entity || '';
}
get _name() {
return this._config.name || '';
}
get _current() {
return this._config.current !== false;
}
get _details() {
return this._config.details !== false;
}
get _graph() {
return this._config.graph !== false;
}
get _info() {
return this._config.info !== false;
}
render() {
if (!this.hass) {
return html``;
}
this.lang = this.hass.selectedLanguage || this.hass.language;
const entities = Object.keys(this.hass.states).filter((eid) =>
Object.keys(this.hass.states[eid].attributes).some((aid) => aid == 'min_price_at')
);
return html`
<div class="card-config">
<div class="side-by-side">
<paper-input
label="${this.ll('optionName')}"
.value="${this._name}"
.configValue="${'name'}"
@value-changed="${this._valueChanged}"
>
</paper-input>
</div>
<div class="side-by-side">
<paper-dropdown-menu
label="${this.ll('optionEntity')}"
@value-changed="${this._valueChanged}"
.configValue="${'entity'}"
>
<paper-listbox slot="dropdown-content" .selected="${entities.indexOf(this._entity)}">
${entities.map((entity) => {
return html` <paper-item>${entity}</paper-item> `;
})}
</paper-listbox>
</paper-dropdown-menu>
</div>
<div class="side-by-side">
<div>
<ha-switch
.checked=${this._current}
.configValue="${'current'}"
@change="${this._valueChanged}"
></ha-switch>
<label class="mdc-label">${this.ll('optionShowCurrent')}</label>
</div>
<div>
<ha-switch
.checked=${this._details}
.configValue="${'details'}"
@change="${this._valueChanged}"
></ha-switch>
<label class="mdc-label">${this.ll('optionShowDetails')}</label>
</div>
</div>
<div class="side-by-side">
<div>
<ha-switch .checked=${this._graph} .configValue="${'graph'}" @change="${this._valueChanged}"></ha-switch>
<label class="mdc-label">${this.ll('optionShowGraph')}</label>
</div>
<div>
<ha-switch .checked=${this._info} .configValue="${'info'}" @change="${this._valueChanged}"></ha-switch>
<label class="mdc-label">${this.ll('optionShowInfo')}</label>
</div>
</div>
</div>
`;
}
_valueChanged(ev) {
if (!this._config || !this.hass) {
return;
}
const target = ev.target;
if (this[`_${target.configValue}`] === target.value) {
return;
}
if (target.configValue) {
if (target.value === '') {
delete this._config[target.configValue];
} else {
this._config = {
...this._config,
[target.configValue]: target.checked !== undefined ? target.checked : target.value
};
}
}
fireEvent(this, 'config-changed', { config: this._config });
}
ll(str) {
if (locale[this.lang] === undefined) return locale.en[str];
return locale[this.lang][str];
}
static get styles() {
return css`
ha-switch {
padding-top: 16px;
}
.mdc-label {
margin-left: 12px;
vertical-align: text-bottom;
}
.side-by-side {
display: flex;
}
.side-by-side > * {
flex: 1;
padding-right: 4px;
}
`;
}
}
customElements.define('pvpc-hourly-pricing-card-editor', PVPCHourlyPricingCardEditor);
window.customCards = window.customCards || [];
window.customCards.push({
type: 'pvpc-hourly-pricing-card',
name: 'PVPC Hourly Pricing',
preview: true,
description: 'The PVPC Hourly Pricing card allows you to properly display the PVPC Hourly Pricing entity.'
});
| FragMenthor/pvpc-hourly-pricing-card |
<|start_filename|>resources/style.css<|end_filename|>
.event_provider_name {
color: gray;
font-size: 10px;
}
.event_time {
font-size: 16px;
}
.event_header_label {
font-size: 20px;
font-weight: bold;
}
.event_source_name {
border-left: 5px solid black;
font-size: 15px;
font-weight: bold;
padding: 5px 5px 5px 10px;
padding-left: 10px;
margin-bottom: 5px;
}
.event_source_config_label {
opacity: 0.6;
font-weight: bold;
}
.items_frame {
background-color: @theme_base_color;
}
.item_list {
background-color: @theme_bg_color;
}
| j-veylop/cigale |
<|start_filename|>_/Section 10/7_elearn/routes/index.js<|end_filename|>
var express = require('express');
var router = express.Router();
var Class = require('../models/class');
/* GET home page. */
router.get('/', function(req, res, next) {
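// Note: the model's getClasses signature is (callback, limit), so the
// trailing 3 below caps the query at three classes.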
Class.getClasses(function(err, classes){
res.render('index', { classes: classes });
},3);
});
module.exports = router;
<|start_filename|>_/Section 11/7_recipebook/public/css/style.css<|end_filename|>
body{
margin-top:10px;
background:#f9f9f9;
}
<|start_filename|>_/Section 6/6_TekBooks/public/css/style.css<|end_filename|>
ul, li{
list-style: none;
}
.top-bar{
margin-bottom:20px;
}
.book{
text-align: center;
}
.book h4{
margin-top: 15px;
}
.price{
margin:5px;
font-size:18px;
}
.price span{
color:green;
background:#f4f4f4;
padding:10px;
border: 1px #ccc solid;
border-radius: 50%;
}
footer{
margin-top: 40px;
padding:10px;
text-align: center;
}
h2 span{font-size:18px;}
<|start_filename|>_/Section 10/6_elearn/models/class.js<|end_filename|>
var mongoose = require('mongoose');
// Class Schema
var ClassSchema = mongoose.Schema({
title: {
type: String
},
description: {
type: String
},
instructor:{
type:String
},
lessons:[{
lesson_number: {type: Number},
lesson_title: {type: String},
lesson_body:{type: String}
}]
});
var Class = module.exports = mongoose.model('Class', ClassSchema);
// Fetch All Classes
module.exports.getClasses = function(callback, limit){
Class.find(callback).limit(limit);
}
// Fetch Single Class
module.exports.getClassById = function(id, callback){
Class.findById(id, callback);
}
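// Usage sketch for the helpers above (hypothetical route code; the view
// name is illustrative):
//   Class.getClassById(req.params.id, function(err, cls){
//     if(err) throw err;
//     res.render('classes/details', { class: cls });
//   });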
<|start_filename|>_/Section 6/6_TekBooks/.build/js/main.js<|end_filename|>
$(document).ready(function(){
$('.removeBook').click(function(e){
var deleteId = $(this).data('id');
$.ajax({
url:'/manage/books/delete/'+deleteId,
type: 'DELETE',
success: function(){
}
});
//window.location = '/manage/books';
});
});
<|start_filename|>_/Section 10/5_elearn/models/student.js<|end_filename|>
var mongoose = require('mongoose');
// Student Schema
var StudentSchema = mongoose.Schema({
first_name: {
type: String
},
last_name: {
type: String
},
address: [{
street_address:{type: String},
city:{type: String},
state:{type: String},
zip:{type: String}
}],
username: {
type: String
},
email: {
type: String
},
classes:[{
class_id:{type: [mongoose.Schema.Types.ObjectId]},
class_title: {type:String}
}]
});
var Student = module.exports = mongoose.model('Student', StudentSchema);
<|start_filename|>_/Section 3/2_express_website/app.js<|end_filename|>
var express = require('express');
var path = require('path');
var bodyParser = require('body-parser');
var nodemailer = require('nodemailer');
var app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: false}));
app.get('/', function(req, res){
res.send('<h1>Hello World</h1>');
});
app.listen(3000);
console.log('Server is running on port 3000...');
<|start_filename|>_/Section 12/4_albumz/routes/genres.js<|end_filename|>
var express = require('express');
var router = express.Router();
var Firebase = require('firebase');
var fbRef = new Firebase('https://albumz01.firebaseio.com/');
router.get('/', function(req, res, next) {
res.render('genres/index');
});
router.get('/add', function(req, res, next) {
res.render('genres/add');
});
router.post('/add', function(req, res, next) {
var genre = {
name: req.body.name
}
var genreRef = fbRef.child('genres');
genreRef.push().set(genre);
req.flash('success_msg', 'Genre Saved');
res.redirect('/genres');
});
module.exports = router;
<|start_filename|>_/Section 10/7_elearn/routes/instructors.js<|end_filename|>
var express = require('express');
var router = express.Router();
var Class = require('../models/class');
var Instructor = require('../models/instructor');
var User = require('../models/user');
router.get('/classes', function(req, res, next){
Instructor.getInstructorByUsername(req.user.username, function(err, instructor){
if(err) throw err;
res.render('instructors/classes', {instructor: instructor});
});
});
router.post('/classes/register', function(req, res){
var info = [];
info['instructor_username'] = req.user.username;
info['class_id'] = req.body.class_id;
info['class_title'] = req.body.class_title;
Instructor.register(info, function(err, instructor){
if(err) throw err;
console.log(instructor);
});
req.flash('success_msg', 'You are now registered to teach this class');
res.redirect('/instructors/classes');
});
router.get('/classes/:id/lessons/new', function(req, res, next){
res.render('instructors/newlesson',{class_id:req.params.id});
});
router.post('/classes/:id/lessons/new', function(req, res, next){
// Get Values
var info = [];
info['class_id'] = req.params.id;
info['lesson_number'] = req.body.lesson_number;
info['lesson_title'] = req.body.lesson_title;
info['lesson_body'] = req.body.lesson_body;
Class.addLesson(info, function(err, lesson){
console.log('Lesson Added..');
});
req.flash('success_msg','Lesson Added');
res.redirect('/instructors/classes');
});
module.exports = router;
<|start_filename|>_/Section 12/9_albumz/routes/users.js<|end_filename|>
var express = require('express');
var router = express.Router();
var Firebase = require('firebase');
var fbRef = new Firebase('https://albumz01.firebaseio.com/');
router.get('/register', function(req, res, next) {
res.render('users/register');
});
router.get('/login', function(req, res, next) {
res.render('users/login');
});
router.post('/register', function(req, res, next) {
var first_name = req.body.first_name;
var last_name = req.body.last_name;
var email = req.body.email;
var password = <PASSWORD>;
var password2 = <PASSWORD>;
var location = req.body.location;
var fav_artists = req.body.fav_artists;
var fav_genres = req.body.fav_genres;
// Validation
req.checkBody('first_name', 'First name is required').notEmpty();
req.checkBody('email', 'Email is required').notEmpty();
req.checkBody('email', 'Email is not valid').isEmail();
req.checkBody('password', '<PASSWORD>').notEmpty();
req.checkBody('password2', 'Passwords do not match').equals(req.body.password);
var errors = req.validationErrors();
if(errors){
res.render('users/register', {
errors: errors
});
} else {
fbRef.createUser({
email: email,
password: password
}, function(error, userData){
if(error){
console.log("Error creating user: ", error);
} else {
console.log("Successfully created user with uid:",userData.uid);
var user = {
uid: userData.uid,
email: email,
first_name: first_name,
last_name: last_name,
location: location,
fav_genres: fav_genres,
fav_artists: fav_artists
}
var userRef = fbRef.child('users');
userRef.push().set(user);
req.flash('success_msg', 'You are now registered and can login');
res.redirect('/users/login');
}
});
}
});
router.post('/login', function(req, res, next) {
var email = req.body.email;
var password = req.body.password;
// Validation
req.checkBody('email', 'Email is required').notEmpty();
req.checkBody('email', 'Email is not valid').isEmail();
req.checkBody('password', 'Password is required').notEmpty();
var errors = req.validationErrors();
if(errors){
res.render('users/login', {
errors: errors
});
} else {
fbRef.authWithPassword({
email: email,
password: password
}, function(error, authData){
if(error){
console.log("Login Failed: ", error);
req.flash('error_msg', 'Login Failed');
res.redirect('/users/login');
} else {
console.log("Authenticated user with uid:",authData);
req.flash('success_msg', 'You are now logged in');
res.redirect('/albums');
}
});
}
});
// Logout User
router.get('/logout', function(req, res){
// Unauthenticate the client
fbRef.unauth();
req.flash('success_msg', 'You are logged out');
res.redirect('/users/login');
});
module.exports = router;
<|start_filename|>_/Section 10/4_elearn/routes/users.js<|end_filename|>
var express = require('express');
var router = express.Router();
var passport = require('passport');
var LocalStrategy = require('passport-local').Strategy;
// Include User Model
var User = require('../models/user');
// Include Student Model
var Student = require('../models/student');
// Include Instructor Model
var Instructor= require('../models/instructor');
// User Register
router.get('/register', function(req, res, next) {
res.render('users/register');
});
// Register User
router.post('/register', function(req, res, next) {
// Get Form Values
var first_name = req.body.first_name;
var last_name = req.body.last_name;
var street_address = req.body.street_address;
var city = req.body.city;
var state = req.body.state;
var zip = req.body.zip;
var email = req.body.email;
var username = req.body.username;
var password = <PASSWORD>;
var password2 = <PASSWORD>;
var type = req.body.type;
// Form Validation
req.checkBody('first_name', 'First name field is required').notEmpty();
req.checkBody('last_name', 'Last name field is required').notEmpty();
req.checkBody('email', 'Email field is required').notEmpty();
req.checkBody('email', 'Email must be a valid email address').isEmail();
req.checkBody('username', 'Username field is required').notEmpty();
req.checkBody('password', 'Password field is required').notEmpty();
req.checkBody('password2', 'Passwords do not match').equals(req.body.password);
var errors = req.validationErrors();
if(errors){
res.render('users/register', {
errors: errors
});
} else {
var newUser = new User({
email: email,
username:username,
password: password,
type: type
});
if(type == 'student'){
console.log('Registering Student...');
var newStudent = new Student({
first_name: first_name,
last_name: last_name,
address: [{
street_address: street_address,
city: city,
state: state,
zip: zip
}],
email: email,
username:username
});
User.saveStudent(newUser, newStudent, function(err, user){
console.log('Student created');
});
} else {
console.log('Registering Instructor...');
var newInstructor = new Instructor({
first_name: first_name,
last_name: last_name,
address: [{
street_address: street_address,
city: city,
state: state,
zip: zip
}],
email: email,
username:username
});
User.saveInstructor(newUser, newInstructor, function(err, user){
console.log('Instructor created');
});
}
req.flash('success', 'User Added');
res.redirect('/');
}
});
module.exports = router;
<|start_filename|>_/Section 6/4_TekBooks/controllers/manage.js<|end_filename|>
'use strict';
module.exports = function (router) {
router.get('/', function (req, res) {
res.render('manage/index');
});
router.get('/books', function (req, res) {
res.render('manage/books/index');
});
router.get('/categories', function (req, res) {
res.render('manage/categories/index');
});
};
<|start_filename|>_/Section 9/5_portfolio/routes/admin.js<|end_filename|>
var express = require('express');
var router = express.Router();
var multer = require('multer');
var upload = multer({dest: './public/images/portfolio'});
var mysql = require('mysql');
var connection = mysql.createConnection({
host: 'localhost',
user: 'root',
password: '<PASSWORD>',
database: 'portfolio'
});
connection.connect();
router.get('/', function(req, res, next) {
connection.query("SELECT * FROM projects", function(err, rows, fields){
if(err) throw err;
res.render('admin/index', {
"projects": rows
});
});
});
router.get('/add', function(req, res, next) {
res.render('admin/add')
});
router.post('/add', upload.single('projectimage'), function(req, res, next) {
// Get Form Values
var title = req.body.title;
var description = req.body.description;
var service = req.body.service;
var url = req.body.url;
var client = req.body.client;
var projectdate = req.body.projectdate;
// Check Image Upload
if(req.file){
var projectImageName = req.file.filename
} else {
var projectImageName = 'noimage.jpg';
}
// Form Field Validation
req.checkBody('title', 'Title field is required').notEmpty();
req.checkBody('service', 'Service field is required').notEmpty();
var errors = req.validationErrors();
if(errors){
return res.render('admin/add', {
errors: errors,
title: title,
description: description,
service: service,
client: client,
url: url
});
} else {
var project = {
title: title,
description: description,
service: service,
client: client,
date: projectdate,
url: url,
image: projectImageName
};
}
var query = connection.query('INSERT INTO projects SET ?', project, function(err, result){
console.log('Error: '+err);
console.log('Success: '+result);
});
req.flash('success_msg', 'Project Added');
res.redirect('/admin');
});
router.get('/edit/:id', function(req, res, next) {
connection.query("SELECT * FROM projects WHERE id = ?", req.params.id, function(err, rows, fields){
if(err) throw err;
res.render('admin/edit', {
"project": rows[0]
});
});
});
router.post('/edit/:id', upload.single('projectimage'), function(req, res, next) {
// Get Form Values
var title = req.body.title;
var description = req.body.description;
var service = req.body.service;
var url = req.body.url;
var client = req.body.client;
var projectdate = req.body.projectdate;
// Check Image Upload
if(req.file){
var projectImageName = req.file.filename
} else {
var projectImageName = 'noimage.jpg';
}
// Form Field Validation
req.checkBody('title', 'Title field is required').notEmpty();
req.checkBody('service', 'Service field is required').notEmpty();
var errors = req.validationErrors();
if(req.file){
if(errors){
return res.render('admin/add', {
errors: errors,
title: title,
description: description,
service: service,
client: client,
url: url
});
} else {
var project = {
title: title,
description: description,
service: service,
client: client,
date: projectdate,
url: url,
image: projectImageName
};
}
} else {
if(errors){
res.render('admin/add', {
errors: errors,
title: title,
description: description,
service: service,
client: client,
url: url
});
} else {
var project = {
title: title,
description: description,
service: service,
client: client,
date: projectdate,
url: url
};
}
}
var query = connection.query('UPDATE projects SET ? WHERE id = '+req.params.id, project, function(err, result){
console.log('Error: '+err);
console.log('Success: '+result);
});
req.flash('success_msg', 'Project Updated');
res.redirect('/admin');
});
router.delete('/delete/:id', function (req, res) {
connection.query('DELETE FROM projects WHERE id = ?', [req.params.id], function (err, result) {
if (err) throw err;
console.log('deleted ' + result.affectedRows + ' rows');
});
req.flash('success_msg', "Project Deleted");
res.sendStatus(200);
});
module.exports = router;
<|start_filename|>_/Section 7/3_chatio/server.js<|end_filename|>
var express = require('express'),
app = express(),
server = require('http').createServer(app),
io = require('socket.io').listen(server);
server.listen(process.env.PORT || 3000);
console.log('Server Running...');
app.get('/', function(req, res){
res.sendFile(__dirname + '/index.html');
});
io.sockets.on('connection', function(socket){
console.log('Socket Connected...');
// Send Message
socket.on('send message', function(data){
io.sockets.emit('new message', {msg: data});
});
});
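// A minimal browser-side sketch for this chat server (hypothetical — the
// repo's index.html is not shown here). It assumes the page loads the
// socket.io client script served at /socket.io/socket.io.js:
//
//   var socket = io.connect(window.location.origin);
//   socket.on('new message', function(data){ console.log(data.msg); });
//   socket.emit('send message', 'hello');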
<|start_filename|>_/Section 9/5_portfolio/app.js<|end_filename|>
var express = require('express');
var path = require('path');
var bodyParser = require('body-parser');
var exphbs = require('express-handlebars');
var expressValidator = require('express-validator');
var session = require('express-session');
var flash = require('connect-flash');
var multer = require('multer');
var upload = multer({ dest: './public/images/portfolio' });
// Route Files
var routes = require('./routes/index');
var admin = require('./routes/admin');
// Init App
var app = express();
// Body Parser
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
// Handle Sessions
app.use(session({
secret:'secret',
saveUninitialized: true,
resave: true
}));
// Validator
app.use(expressValidator({
errorFormatter: function(param, msg, value) {
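// Convert dotted param paths (e.g. "address.city") into bracket
// notation ("address[city]") so error keys match the form field names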
var namespace = param.split('.')
, root = namespace.shift()
, formParam = root;
while(namespace.length) {
formParam += '[' + namespace.shift() + ']';
}
return {
param : formParam,
msg : msg,
value : value
};
}
}));
// Public Folder
app.use(express.static(path.join(__dirname, 'public')));
// View Engine
app.set('views', path.join(__dirname, 'views'));
app.engine('handlebars', exphbs({defaultLayout: 'main'}));
app.set('view engine', 'handlebars');
// Connect Flash
app.use(flash());
app.use('/', routes);
app.use('/admin', admin);
app.set('port', (process.env.PORT || 3000));
app.listen(app.get('port'), function(){
console.log('Server started on port: '+app.get('port'));
});
<|start_filename|>_/Section 10/7_elearn/routes/classes.js<|end_filename|>
var express = require('express');
var router = express.Router();
var Class = require('../models/class');
//Classes Page
router.get('/', function(req, res, next) {
Class.getClasses(function(err, classes){
if(err) throw err;
res.render('classes/index', { classes: classes });
},3);
});
// Class Details
router.get('/:id/details', function(req, res, next) {
Class.getClassById([req.params.id],function(err, classname){
if(err) throw err;
res.render('classes/details', { class: classname });
});
});
// Get Lessons
router.get('/:id/lessons', function(req, res, next) {
Class.getClassById([req.params.id],function(err, classname){
if(err) throw err;
res.render('classes/lessons', { class: classname });
});
});
// Get Lesson
router.get('/:id/lessons/:lesson_id', function(req, res, next) {
Class.getClassById([req.params.id],function(err, classname){
var lesson;
if(err) throw err;
for(var i=0;i<classname.lessons.length;i++){
if(classname.lessons[i].lesson_number == req.params.lesson_id){
lesson = classname.lessons[i];
}
}
res.render('classes/lesson', { class: classname,lesson: lesson });
});
});
module.exports = router;
<|start_filename|>_/Section 11/4_recipebook/app.js<|end_filename|>
var express = require('express'),
path = require('path'),
bodyParser = require('body-parser'),
cons = require('consolidate'),
dust = require('dustjs-helpers'),
pg = require('pg'),
app = express();
// DB Connect String
var connect = "postgres://eduonix:123456@localhost/recipebookdb";
// Assign Dust Engine To .dust Files
app.engine('dust', cons.dust);
// Set Default Ext .dust
app.set('view engine', 'dust');
app.set('views', __dirname + '/views');
// Set Public Folder
app.use(express.static(path.join(__dirname, 'public')));
// Body Parser Middleware
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.get('/', function(req, res){
// PG Connect
pg.connect(connect, function(err, client, done) {
if(err) {
return console.error('error fetching client from pool', err);
}
client.query('SELECT * FROM recipes', function(err, result) {
if(err) {
return console.error('error running query', err);
}
res.render('index', {recipes: result.rows});
done();
});
});
});
// Server
app.listen(3000, function(){
console.log('Server Started On Port 3000');
});
<|start_filename|>_/Section 12/9_albumz/app.js<|end_filename|>
var express = require('express');
var path = require('path');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var session = require('express-session');
var expressValidator = require('express-validator');
var flash = require('connect-flash');
var Firebase = require('firebase');
var fbRef = new Firebase('https://albumz01.firebaseio.com/');
// Route Files
var routes = require('./routes/index');
var albums = require('./routes/albums');
var genres = require('./routes/genres');
var users = require('./routes/users');
// Init App
var app = express();
// View Engine
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');
// Logger
app.use(logger('dev'));
// Body Parser
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
// Handle Sessions
app.use(session({
secret:'secret',
saveUninitialized: true,
resave: true
}));
// Validator
app.use(expressValidator({
errorFormatter: function(param, msg, value) {
var namespace = param.split('.')
, root = namespace.shift()
, formParam = root;
while(namespace.length) {
formParam += '[' + namespace.shift() + ']';
}
return {
param : formParam,
msg : msg,
value : value
};
}
}));
// Static Folder
app.use(express.static(path.join(__dirname, 'public')));
// Connect Flash
app.use(flash());
// Global Vars
app.use(function (req, res, next) {
res.locals.success_msg = req.flash('success_msg');
res.locals.error_msg = req.flash('error_msg');
res.locals.error = req.flash('error');
next();
});
// Routes
app.use('/', routes);
app.use('/albums', albums);
app.use('/genres', genres);
app.use('/users', users);
// Set Port
app.set('port', (process.env.PORT || 3000));
// Run Server
app.listen(app.get('port'), function(){
console.log('Server started on port: '+app.get('port'));
});
<|start_filename|>_/Section 12/9_albumz/public/css/style.css<|end_filename|>
.albums img, .album img{
width:100%;
border:2px #fff solid;
}
.albums img:hover {
opacity:0.7;
cursor: pointer;
}
.albums h4{
margin-bottom:0;
}
footer{
margin-top:40px;
border-top:#333 solid 1px;
padding-top:20px;
}
<|start_filename|>_/Section 10/7_elearn/models/instructor.js<|end_filename|>
var mongoose = require('mongoose');
// Instructor Schema
var InstructorSchema = mongoose.Schema({
first_name: {
type: String
},
last_name: {
type: String
},
address: [{
street_address:{type: String},
city:{type: String},
state:{type: String},
zip:{type: String}
}],
username: {
type: String
},
email: {
type: String
},
classes:[{
class_id:{type: [mongoose.Schema.Types.ObjectId]},
class_title: {type:String}
}]
});
var Instructor = module.exports = mongoose.model('instructor', InstructorSchema);
module.exports.getInstructorByUsername = function(username, callback){
var query = {username: username};
Instructor.findOne(query, callback);
}
// Register Instructor for Class
module.exports.register = function(info, callback) {
var instructor_username = info['instructor_username'];
var class_id = info['class_id'];
var class_title = info['class_title'];
var query = {username: instructor_username};
Instructor.findOneAndUpdate(
query,
{$push: {"classes": {class_id: class_id, class_title: class_title}}},
{safe: true, upsert: true},
callback
);
}
<|start_filename|>_/Section 12/4_albumz/routes/albums.js<|end_filename|>
var express = require('express');
var router = express.Router();
var Firebase = require('firebase');
var fbRef = new Firebase('https://albumz01.firebaseio.com/');
var multer = require('multer');
var upload = multer({dest:'./public/images/uploads'});
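// multer stores uploads under public/images/uploads with generated filenames;
// req.file.filename is later saved as the album cover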
router.get('/', function(req, res, next) {
res.render('albums/index');
});
router.get('/add', function(req, res, next) {
var genreRef = fbRef.child('genres');
genreRef.once('value', function(snapshot){
var data = [];
snapshot.forEach(function(childSnapshot){
var key = childSnapshot.key();
var childData = childSnapshot.val();
data.push({
id: key,
name: childData.name
});
});
res.render('albums/add',{genres: data});
});
});
router.post('/add', upload.single('cover'),function(req, res, next) {
// Check File Upload
if(req.file){
console.log('Uploading File...');
var cover = req.file.filename;
} else {
console.log('No File Uploaded...');
var cover = 'noimage.jpg';
}
// Build Album Object
var album = {
artist: req.body.artist,
title: req.body.title,
genre: req.body.genre,
info: req.body.info,
year: req.body.year,
label: req.body.label,
tracks: req.body.tracks,
cover: cover,
}
// Create Reference
var albumRef = fbRef.child("albums");
// Push Album
albumRef.push().set(album);
req.flash('success_msg', 'Album Saved');
res.redirect('/albums');
});
module.exports = router;
<|start_filename|>_/Section 6/3_TekBooks/controllers/books.js<|end_filename|>
'use strict';
module.exports = function(router){
router.get('/', function(req, res){
res.render('index');
});
router.get('/details/:id', function(req, res){
res.render('books/details');
});
}
<|start_filename|>_/Section 9/5_portfolio/routes/index.js<|end_filename|>
var express = require('express');
var router = express.Router();
var mysql = require('mysql');
var connection = mysql.createConnection({
host: 'localhost',
user: 'root',
password: '<PASSWORD>',
database: 'portfolio'
});
connection.connect();
router.get('/', function(req, res, next) {
connection.query("SELECT * FROM projects", function(err, rows, fields){
if(err) throw err;
res.render('index', {
"projects": rows
});
});
});
router.get('/details/:id', function(req, res, next) {
connection.query("SELECT * FROM projects WHERE id = ?", req.params.id, function(err, rows, fields){
if(err) throw err;
res.render('details', {
"project": rows[0]
});
});
});
module.exports = router;
<|start_filename|>_/Section 4/6_nodeblog/public/stylesheets/style.css<|end_filename|>
body {
font: 15px Helvetica, Arial, sans-serif;
background: #f4f4f4;
color:#666;
}
.logo{
text-align: center;
margin:auto;
display:block;
}
.container{
width:750px;
border: 1px solid #ccc;
margin: 20px auto;
padding:20px;
border-top: #83cd39 3px solid;
}
.clr{
clear: both;
}
ul{
padding:0;
margin:0;
}
h1,h2,h3,p{
padding:5px 0;
margin-bottom:0;
}
p{
margin:0;
}
nav{
background:#404137;
color:#fff;
overflow:auto;
height:40px;
padding:20px 0 0 10px;
font-size: 18px;
}
nav li{
float:left;
list-style:none;
}
nav a{
padding:10px;
margin:0 10px;
color:#fff;
text-decoration:none;
}
nav a.current, nav a:hover{
background: #83cd29;
color:#000;
}
.meta{
padding:7px;
border:1px solid #ccc;
background:#ccc;
margin-bottom:10px;
}
a.more{
display:block;
width:80px;
background:#404137;
color:#fff;
padding:10px;
margin-top:30px;
text-decoration: none;
}
.post{
border-bottom:1px solid #ccc;
padding-bottom:20px;
}
.post h1 a{
color:#666;
text-decoration: none;
}
input, select, textarea{
margin-bottom:15px;
}
label{
display:inline-block;
width:180px;
}
input[type='text'], select, textarea{
padding:3px;
height:20px;
width:200px;
border:1px #ccc solid;
}
select{
height:28px;
}
textarea{
height:70px;
width:400px;
}
<|start_filename|>src/app/panels/heatmap/module.js<|end_filename|>
/*
## HeatMap D3 Panel
*/
define([
'angular',
'app',
'underscore',
'jquery',
'd3',
'require',
'css!./module.css'
],
function (angular, app, _, $, d3, localRequire) {
'use strict';
var module = angular.module('kibana.panels.heatmap', []);
app.useModule(module);
module.controller('heatmap', function ($scope, dashboard, querySrv, filterSrv) {
$scope.MIN_ROWS = 1;
$scope.MAX_ROWS = 100;
$scope.panelMeta = {
modals: [
{
description: "Inspect",
icon: "icon-info-sign",
partial: "app/partials/inspector.html",
show: $scope.panel.spyable
}
],
editorTabs: [
{
title: 'Queries',
src: 'app/partials/querySelect.html'
}
],
status: "Experimental",
description: "Heat Map for Representing Pivot Facet Counts",
rotate: true
};
var _d = {
queries: {
mode: 'all',
ids: [],
query: '*:*',
custom: ''
},
size: 0,
row_field: '',
col_field: '',
row_size: 5,
color:'gray',
spyable: true,
transpose_show: true,
transposed: false,
show_queries:true,
};
// Set panel's default values
_.defaults($scope.panel, _d);
$scope.requireContext = localRequire;
$scope.init = function () {
$scope.generated_id = $scope.randomNumberRange(1, 1000000);
$scope.$on('refresh', function () {
$scope.get_data();
});
$scope.get_data();
};
$scope.randomNumberRange = function(min, max) {
return Math.floor(Math.random() * (max - min + 1) + min);
};
$scope.get_data = function () {
// Show progress by displaying a spinning wheel icon on panel
$scope.panelMeta.loading = true;
delete $scope.panel.error;
var request, results;
// Set Solr server
$scope.sjs.client.server(dashboard.current.solr.server + dashboard.current.solr.core_name);
// -------------------- TODO: REMOVE ALL ELASTIC SEARCH AFTER FIXING SOLRJS --------------
$scope.panel.queries.ids = querySrv.idsByMode($scope.panel.queries);
// This could probably be changed to a BoolFilter
var boolQuery = $scope.sjs.BoolQuery();
_.each($scope.panel.queries.ids, function (id) {
boolQuery = boolQuery.should(querySrv.getEjsObj(id));
});
request = $scope.sjs.Request();
request = request.query(
$scope.sjs.FilteredQuery(
boolQuery,
filterSrv.getBoolFilter(filterSrv.ids)
))
.size($scope.panel.size); // Set the size of query result
$scope.populate_modal(request);
// --------------------- END OF ELASTIC SEARCH PART ---------------------------------------
var fq = '';
if (filterSrv.getSolrFq()) {
fq = '&' + filterSrv.getSolrFq();
}
var wt_json = '&wt=json';
var rows_limit = '&rows=' + $scope.panel.size;
var facet = '&facet=true';
var facet_pivot = '&facet.pivot=' + $scope.panel.row_field + ',' + $scope.panel.col_field;
var facet_limit = '&facet.limit=' + $scope.panel.row_size;
var facet_pivot_mincount = '&facet.pivot.mincount=0';
$scope.panel.queries.query = querySrv.getORquery() + fq + wt_json + rows_limit + facet + facet_pivot + facet_limit + facet_pivot_mincount;
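// Example of the assembled parameter string (actual values depend on panel settings):
// q=*:*&fq=...&wt=json&rows=0&facet=true&facet.pivot=row_field,col_field&facet.limit=5&facet.pivot.mincount=0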
// Set the additional custom query
if ($scope.panel.queries.custom != null) {
request = request.setQuery($scope.panel.queries.query + $scope.panel.queries.custom);
} else {
request = request.setQuery($scope.panel.queries.query);
}
// Execute the search and get results
results = request.doSearch();
// Populate scope when we have results
results.then(function (results) {
// Check for error and abort if found
if(!(_.isUndefined(results.error))) {
$scope.panel.error = $scope.parse_error(results.error.msg);
$scope.init_arrays();
$scope.render();
$scope.panelMeta.loading = false;
return;
}
// build $scope.data array
var facets = results.facet_counts.facet_pivot;
var key = Object.keys(facets)[0];
$scope.facets = facets[key];
$scope.init_arrays();
$scope.formatData($scope.facets, $scope.panel.transposed);
$scope.render();
// Hide the spinning wheel icon once results have arrived
$scope.panelMeta.loading = false;
});
};
$scope.init_arrays = function() {
$scope.data = [];
$scope.row_labels = [];
$scope.col_labels = [];
$scope.hcrow = [];
$scope.hccol = [];
$scope.internal_sum = [];
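// domain tracks [min, max] of the facet counts; start it inverted so the
// first data point initializes both bounds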
$scope.domain = [Number.MAX_VALUE,0];
$scope.axis_labels = [$scope.panel.col_field, $scope.panel.row_field];
};
$scope.formatData = function(facets, flipped) {
$scope.init_arrays();
_.each(facets, function(d, i) {
// build the arrays to be used
if(!flipped) {
$scope.row_labels.push(d.value);
$scope.hcrow.push($scope.row_labels.length);
} else {
$scope.col_labels.push(d.value);
$scope.hccol.push($scope.col_labels.length);
[$scope.axis_labels[0], $scope.axis_labels[1]] = [$scope.axis_labels[1], $scope.axis_labels[0]];
}
_.each(d.pivot, function(p) {
// columns in each row
var entry = {};
var v = p.value;
var index;
if(!flipped) {
$scope.internal_sum.push(0);
if($scope.col_labels.indexOf(v) === -1) {
$scope.col_labels.push(v);
$scope.hccol.push($scope.col_labels.length);
}
index = $scope.col_labels.indexOf(v); // index won't be -1 as we count in the facets with count = 0
$scope.internal_sum[index] += p.count;
entry.row = i + 1;
entry.col = index + 1;
} else {
if($scope.row_labels.indexOf(v) === -1) {
$scope.row_labels.push(v);
$scope.hcrow.push($scope.row_labels.length);
}
index = $scope.row_labels.indexOf(v); // index won't be -1 as we count in the facets with count = 0
$scope.internal_sum[index] += p.count;
entry.col = i + 1;
entry.row = index + 1;
}
entry.value = p.count;
$scope.domain[0] = Math.min($scope.domain[0], p.count);
$scope.domain[1] = Math.max($scope.domain[1], p.count);
$scope.data.push(entry);
});
});
};
$scope.flip = function() {
$scope.panel.transposed = !$scope.panel.transposed;
$scope.formatData($scope.facets, $scope.panel.transposed);
$scope.render();
};
$scope.set_refresh = function (state) {
$scope.refresh = state;
};
$scope.close_edit = function () {
if ($scope.refresh) {
$scope.get_data();
$scope.formatData($scope.facets, $scope.panel.transposed);
$scope.render();
}
$scope.refresh = false;
$scope.$emit('render');
};
$scope.render = function () {
$scope.$emit('render');
};
$scope.populate_modal = function (request) {
$scope.inspector = angular.toJson(JSON.parse(request.toString()), true);
};
$scope.build_search = function(x, y) {
if (x && y) {
filterSrv.set({type: 'terms', field: $scope.panel.row_field, value: x, mandate: 'must'});
filterSrv.set({type: 'terms', field: $scope.panel.col_field, value: y, mandate: 'must'});
} else {
return;
}
dashboard.refresh();
};
});
module.directive('heatmapChart', function () {
return {
restrict: 'E',
link: function (scope, element) {
scope.$on('render', function () {
render_panel();
});
angular.element(window).bind('resize', function () {
render_panel();
});
// Function for rendering panel
function render_panel() {
var parent_width = element.parent().width(),
row_height = parseInt(scope.row.height);
const TICK_LENGTH = 10;
const MARGIN = 15;
const MAX_LABEL_LENGTH = 10;
const INTENSITY = 3;
const LEGEND = {
height: 20,
width: parent_width / 2,
margin: 10,
text_margin: 10,
text_height: 15
};
const labels = {
top: 90,
left: 120
};
element.html('<div id="_' + scope.generated_id + '" style="height: 100%"></div>');
var data = jQuery.extend(true, [], scope.data); // jshint ignore:line
var intensity_domain = d3.scale.linear().domain(scope.domain).range([-INTENSITY, INTENSITY]);
data = _.map(data, function(d){
return{
row: +d.row,
col: +d.col,
value: +intensity_domain(d.value)
};
});
var svg_width = parent_width,
svg_height = row_height,
canvas_height = svg_height - labels.top - MARGIN - LEGEND.margin
- LEGEND.height - LEGEND.text_margin - LEGEND.text_height,
canvas_width = svg_width - labels.left;
var rowSortOrder = false,
colSortOrder = false;
var cell_color = scope.panel.color;
var hcrow, hccol, rowLabel, colLabel;
// jshint ignore:start
hcrow = jQuery.extend(true, [], scope.hcrow),
hccol = jQuery.extend(true, [], scope.hccol),
rowLabel = jQuery.extend(true, [], scope.row_labels),
colLabel = jQuery.extend(true, [], scope.col_labels);
// jshint ignore:end
var cell_width = canvas_width / colLabel.length,
cell_height = canvas_height / rowLabel.length,
col_number = colLabel.length,
row_number = rowLabel.length;
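// Map a signed intensity shift onto darker/lighter shades of the panel's base color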
var colorScale = (shift) => { return d3.hsl(cell_color).darker(shift).toString(); };
var $tooltip = $('<div>');
var svg = d3.select("#_" + scope.generated_id).append("svg")
.attr("width", "98%")
.attr("height", "98%")
.append("g");
// Row Labels
var rowLabels = svg.append("g") // jshint ignore:line
.selectAll(".rowLabelg")
.data(rowLabel)
.enter()
.append("text")
.text(function (d) {
if(d.length > MAX_LABEL_LENGTH) {
return d.substring(0, MAX_LABEL_LENGTH) + '...';
} else {
return d;
}
})
.attr("x", 0)
.attr("y", function (d, i) {
return labels.top + MARGIN + hcrow.indexOf(i + 1) * cell_height;
})
.attr("transform", "translate(25, " + cell_height / 2 + ")")
.attr("alignment-baseline", "middle")
.attr("class", function () {
return "rowLabel_" + scope.generated_id + " axis-label";
})
.on("mouseover", function (d) {
d3.select(this).classed("text-hover", true);
$tooltip.html(d).place_tt(d3.event.pageX, d3.event.pageY);
})
.on("mouseout", function () {
d3.select(this).classed("text-hover", false);
d3.select(this).classed("cell-hover", false);
d3.selectAll(".rowLabel_" + scope.generated_id).classed("text-highlight", false);
d3.selectAll(".colLabel_" + scope.generated_id).classed("text-highlight", false);
$tooltip.detach();
})
.on("click", function (d, i) {
rowSortOrder = !rowSortOrder;
sortbylabel("r", i, rowSortOrder);
});
svg.append("text")
.attr("x", 0)
.attr("y", 0)
.text(scope.axis_labels[1])
.attr("transform", "translate(10, " + svg_height / 2 + ") rotate(-90)")
.attr("class", "axis-label");
// Column labels
var colLabels = svg.append("g") // jshint ignore:line
.selectAll(".colLabelg")
.data(colLabel)
.enter()
.append("text")
.text(function (d) {
if(d.length > MAX_LABEL_LENGTH) {
return d.substring(0, MAX_LABEL_LENGTH) + '...';
} else {
return d;
}
})
.attr("x", -labels.top)
.attr("y", function (d, i) {
return 100 + hccol.indexOf(i + 1) * cell_width;
})
.attr("text-anchor", "start")
.attr("alignment-baseline", "middle")
.attr("transform", "translate(" + cell_width / 2 + ", 0) rotate (-90)")
.attr("class", function () {
return "colLabel_" + scope.generated_id + " axis-label";
})
.on("mouseover", function (d) {
d3.select(this).classed("text-hover", true);
$tooltip.html(d).place_tt(d3.event.pageX, d3.event.pageY);
})
.on("mouseout", function () {
d3.select(this).classed("text-hover", false);
d3.select(this).classed("cell-hover", false);
d3.selectAll(".rowLabel_" + scope.generated_id).classed("text-highlight", false);
d3.selectAll(".colLabel_" + scope.generated_id).classed("text-highlight", false);
$tooltip.detach();
})
.on("click", function (d, i) {
colSortOrder = !colSortOrder;
sortbylabel("c", i, colSortOrder);
});
svg.append("text")
.attr("x", 0)
.attr("y", 0)
.text(scope.axis_labels[0])
.attr("transform", "translate(" + svg_width / 2 + ", 10)")
.attr("class", "axis-label");
// Heatmap component
var heatMap = svg.append("g"); // jshint ignore:line
heatMap.attr("transform", "translate(100, " + (labels.top + MARGIN) + ")")
.selectAll(".cellg")
.data(data, function (d) {
return d.row + ":" + d.col;
})
.enter()
.append("rect")
.attr("x", function (d) {
return hccol.indexOf(d.col) * cell_width;
})
.attr("y", function (d) {
return hcrow.indexOf(d.row) * cell_height;
})
.attr("class", function (d) {
return "cell_" + scope.generated_id + " cell-border cr" + (d.row - 1) + "_" + scope.generated_id + " cc" + (d.col - 1) + "_" + scope.generated_id;
})
.attr("width", cell_width)
.attr("height", cell_height)
.style("fill", function (d) {
return colorScale(d.value);
})
.on("mouseover", function (d, i) {
//highlight text
d3.select(this).classed("cell-hover", true);
d3.selectAll(".rowLabel_" + scope.generated_id).classed("text-highlight", function (r, ri) {
return ri === (d.row - 1);
});
d3.selectAll(".colLabel_" + scope.generated_id).classed("text-highlight", function (c, ci) {
return ci === (d.col - 1);
});
$tooltip.html(rowLabel[d.row - 1] + ", " + colLabel[d.col - 1] + " (" + scope.data[i].value + ")").place_tt(d3.event.pageX, d3.event.pageY);
})
.on("mouseout", function () {
d3.select(this).classed("cell-hover", false);
d3.selectAll(".rowLabel_" + scope.generated_id).classed("text-highlight", false);
d3.selectAll(".colLabel_" + scope.generated_id).classed("text-highlight", false);
$tooltip.detach();
})
.on("click", (d) => {
d3.select(this).classed("cell-hover", false);
$tooltip.detach();
scope.build_search(rowLabel[d.row - 1], colLabel[d.col - 1]);
});
// Grid
heatMap.append("g")
.selectAll(".gridgv")
.data(d3.range(hccol.length + 1))
.enter()
.append("line")
.attr("x1", (d) => {
return d * cell_width;
})
.attr("y1", 0)
.attr("x2", (d) => {
return d * cell_width;
})
.attr("y2", hcrow.length * cell_height)
.attr("class", "grid");
heatMap.append("g")
.selectAll(".gridgh")
.data(d3.range(hcrow.length + 1))
.enter()
.append("line")
.attr("x1", 0)
.attr("y1", (d) => {
return d * cell_height;
})
.attr("x2", hccol.length * cell_width)
.attr("y2", (d) => {
return d * cell_height;
})
.attr("class", "grid");
// Column ticks
heatMap.append("g") // jshint ignore:line
.selectAll(".colLabelg")
.data(colLabel)
.enter()
.append("line")
.attr("x1", 0)
.attr("y1", 0)
.attr("x2", 0)
.attr("y2", TICK_LENGTH)
.attr("transform", (d, i) => {
return "translate(" + (hccol.indexOf(i + 1) * cell_width + cell_width / 2) + ", -5)";
})
.attr("class", "tick");
// Row ticks
heatMap.append("g") // jshint ignore:line
.selectAll(".rowLabelg")
.data(rowLabel)
.enter()
.append("line")
.attr("x1", 0)
.attr("y1", 0)
.attr("x2", 0)
.attr("y2", TICK_LENGTH)
.attr("transform", (d, i) => {
return "translate(5, " + (hcrow.indexOf(i + 1) * cell_height + cell_height / 2) + ") rotate (90)";
})
.attr("class", "tick");
// Legend
var linearGradient = svg.append("defs").append("linearGradient")
.attr("id", "legendGradient_" + scope.generated_id);
linearGradient.append("stop")
.attr("offset", "0%")
.attr("stop-color", colorScale(-INTENSITY));
linearGradient.append("stop")
.attr("offset", "50%")
.attr("stop-color", colorScale(0));
linearGradient.append("stop")
.attr("offset", "100%")
.attr("stop-color", colorScale(INTENSITY));
var legend = svg.append("svg");
legend.attr("x", parseInt((svg_width - LEGEND.width) / 2))
.attr("y", svg_height - LEGEND.margin - LEGEND.height - LEGEND.text_height).append("g");
legend.append("rect")
.attr("width", LEGEND.width)
.attr("height", LEGEND.height)
.attr("fill", "url('#legendGradient_" + scope.generated_id + "')");
legend.append("g")
.selectAll(".legendt")
.data(d3.range(11))
.enter()
.append("line")
.attr("x1", (d) => {
return parseInt(d * LEGEND.width / 10);
})
.attr("y1", LEGEND.height - TICK_LENGTH)
.attr("x2", (d) => {
return parseInt(d * LEGEND.width / 10);
})
.attr("y2", LEGEND.height)
.attr("class", "tick");
legend.append("g")
.selectAll(".legendl")
.data(d3.range(11))
.enter()
.append("text")
.attr("x", (d) => {
return parseInt(d * LEGEND.width / 10);
})
.attr("y", parseInt(LEGEND.height + 15))
.text((d) => {
return Math.round(scope.domain[0] + (scope.domain[1] - scope.domain[0]) / 10 * d);
})
.attr("text-anchor", "middle")
.attr("class", "axis-label");
// Function to sort the cells with respect to selected row or column
function sortbylabel(rORc, i, sortOrder) {
// rORc .. r for row, c for column
var t = svg.transition().duration(1200);
var values = []; // holds the cell values of the selected row or column
for(var j = 0; j < Math.max(col_number, row_number); j++) { values.push(-Infinity); }
var sorted; // sorted is zero-based index
d3.selectAll(".c" + rORc + i + "_" + scope.generated_id)
.filter(function (ce) {
if(rORc === "r") {
values[ce.col - 1] = ce.value;
} else {
values[ce.row - 1] = ce.value;
}
});
if (rORc === "r") { // sorting by rows
// sort the existing column indices of this row by their cell values
sorted = d3.range(col_number).sort(function (a, b) {
var value;
if (sortOrder) {
value = values[b] - values[a];
value = isNaN(value) ? Infinity : value;
} else {
value = values[a] - values[b];
value = isNaN(value) ? Infinity : value;
}
return value;
});
t.selectAll(".cell_" + scope.generated_id)
.attr("x", function (d) {
return sorted.indexOf(d.col - 1) * cell_width;
});
t.selectAll(".colLabel_" + scope.generated_id)
.attr("y", function (d, i) {
return 100 + sorted.indexOf(i) * cell_width;
});
} else { // sorting by columns
sorted = d3.range(row_number).sort(function (a, b) {
var value;
if (sortOrder) {
value = values[b] - values[a];
value = isNaN(value) ? Infinity : value;
} else {
value = values[a] - values[b];
value = isNaN(value) ? Infinity : value;
}
return value;
});
t.selectAll(".cell_" + scope.generated_id)
.attr("y", function (d) {
return sorted.indexOf(d.row - 1) * cell_height;
});
t.selectAll(".rowLabel_" + scope.generated_id)
.attr("y", function (d, i) {
return labels.top + MARGIN + sorted.indexOf(i) * cell_height;
});
}
}
}
}
};
});
});
<|start_filename|>assets/javascripts/drawindicators.js<|end_filename|>
function drawIndicators(plannedValue, actualCost, earnedValue){
//Bar indicators and behavior from sidebar.---------------------------------------------------------------------------
var maxValue = Math.max(plannedValue, actualCost, earnedValue);
var barMaxHeight = 100;
var spiPlannedValueBarHeight = Math.round((plannedValue * barMaxHeight) / maxValue);
var earnedValueBarHeight = Math.round((earnedValue * barMaxHeight) / maxValue);
var cpiActualCostBarHeight = Math.round((actualCost * barMaxHeight) / maxValue);
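// e.g. with PV=50, AC=80, EV=40 the tallest value is AC: the bar heights
// become round(50*100/80)=63, round(40*100/80)=50 and 100 pixels respectively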
$('#spi-pv-bar').height(spiPlannedValueBarHeight);
$('#spi-ev-bar').height(earnedValueBarHeight);
$('#cpi-ev-bar').height(earnedValueBarHeight);
$('#cpi-ac-bar').height(cpiActualCostBarHeight);
var minHeightForFont = parseInt($('.bars p').css("font-size"));
//This aligns the Label inside the bars correctly when these its too small.
if(spiPlannedValueBarHeight < minHeightForFont)
$('#evm-spi-pv-bar-container p').css("padding-bottom", "" + spiPlannedValueBarHeight + "px");
else $('#evm-spi-pv-bar-container p').css("line-height", "" + spiPlannedValueBarHeight + "px"); //Aligns the "PV" label inside the bar
if(earnedValueBarHeight < minHeightForFont){
$('#evm-spi-ev-bar-container p').css("padding-bottom", "" + earnedValueBarHeight + "px");
$('#evm-cpi-ev-bar-container p').css("padding-bottom", "" + earnedValueBarHeight + "px");
} else {
$('#evm-spi-ev-bar-container p').css("line-height", "" + earnedValueBarHeight + "px");
$('#evm-cpi-ev-bar-container p').css("line-height", "" + earnedValueBarHeight + "px");
}
if(cpiActualCostBarHeight < minHeightForFont)
$('#evm-cpi-ac-bar-container p').css("padding-bottom", "" + cpiActualCostBarHeight + "px");
else $('#evm-cpi-ac-bar-container p').css("line-height", "" + cpiActualCostBarHeight + "px");
}
<|start_filename|>assets/javascripts/drawchart.js<|end_filename|>
/* Dependencies: jquery, flot, flottime, flotlabel, gaugemin */
//Draws the chart for the project or versions. (Flot)
function drawChart(dataToChart, placeholder, actualWeek, endDate){
var chartHtmlElement = $('#' + placeholder);
var data = dataToChart;
var actualCostEstimateLine;
var earnedValueEstimateLine;
var markings = [];
// Forecast lines and the "Project is here" marker only apply while today
// is still within the project timeline
if (actualWeek <= endDate) {
markings = [{ color: "#E0E0E0", lineWidth: 1, xaxis: { from: actualWeek , to: actualWeek } }]; // vertical line marking today's date
actualCostEstimateLine = data.actual_cost_forecast;
earnedValueEstimateLine = data.earned_value_forecast;
}
var graphData = [
{
data: data.bac_top_line,
label: RedmineEVM.I18n.t('budget_at_complete'),
color: "#CEE8FA", dashes: { show: true, lineWidth: 1 }
},{
data: data.eac_top_line ,
label: RedmineEVM.I18n.t('estimated_at_complete'),
color: "#FFE2B8", dashes: { show: true, lineWidth: 1 }
},{
data: actualCostEstimateLine ,
label: RedmineEVM.I18n.t('actual_cost_forecast'),
color: "#FCB040", dashes: { show: true, lineWidth: 3 }, points: { show: true, fill: true, fillColor: "#FCB040" }
},{
data: earnedValueEstimateLine ,
label: RedmineEVM.I18n.t('earned_value_forecast'),
color: "#8CC63F", dashes: { show: true, lineWidth: 3 }, points: { show: true, fill: true, fillColor: "#8CC63F" }
},{
data: data.planned_value,
label: RedmineEVM.I18n.t('planned_value'),
color: '#0F75BC'
},{
data: data.actual_cost,
label: RedmineEVM.I18n.t('actual_cost'),
color: '#FBC040'
},{
data: data.earned_value,
label: RedmineEVM.I18n.t('earned_value'),
color: '#8CC63F'
}];
// Lines
var plot = $.plot(chartHtmlElement, graphData, {
series: {
shadowSize: 0,
lines: { lineWidth: 3 },
points: { radius: 2 }
},
grid: {
markings: markings,
color: 'transparent',
borderColor: { bottom: "#BFBFBF", left: "#BFBFBF" },
borderWidth: 1,
hoverable: true
},
xaxis: {
mode: "time",
timeformat: "%d %b %Y", /*"%d %b %Y"*/
minTickSize: [1, "day"],
axisLabel: RedmineEVM.I18n.t('label_date'),
axisLabelUseCanvas: true,
axisLabelFontSizePixels: 10,
axisLabelPadding: 6
},
yaxis: {
min: 0,
axisLabel: RedmineEVM.I18n.t('label_hours'),
axisLabelUseCanvas: true,
axisLabelFontSizePixels: 10,
axisLabelPadding: 6
},
legend: { show: false }
});
//Flot tooltip style
$("<div id='tooltip'></div>").css({
position: "absolute",
display: "none",
//border: "1px solid #fdd",
padding: "2px",
"background-color": "#FFFFFF",
opacity: 0.80
}).appendTo("body");
//Flot tooltip
chartHtmlElement.bind("plothover", function (event, pos, item) {
if (item) {
var x = item.datapoint[0].toFixed(2),
y = item.datapoint[1].toFixed(2);
var hours = y;
var date = moment(parseInt(x)).format("DD MMM YYYY");
//Use moment.js lib!
$("#tooltip").html("<b>" + item.series.label + "</b> " + hours + " hours <br>" + date)
.css({top: item.pageY+5, left: item.pageX+5})
.fadeIn(200);
} else {
$("#tooltip").hide();
}
});
//The marker 'project is here'.
if (actualWeek <= endDate) {
var maxYValue = parseInt($('.flot-y-axis .tickLabel').last().text());
var o = plot.pointOffset({ x: actualWeek, y: maxYValue * 0.1});
chartHtmlElement.append("<div id='marker-label-chart' class='markers' style='left:" + (o.left + 5) + "px;top:" + o.top + "px;'>Project is here</div>");
}
}
<|start_filename|>assets/stylesheets/evm.css<|end_filename|>
#evm-title{
line-height: 0px;
padding-top: 17px;
}
#evm-subtitle{ color: gray; }
/****Sidebar Indicator Bars****/
.indicators-container{
overflow: auto;
margin-right: 10px;
padding-left: 15px;
float: left;
}
.indicators-container p{
line-height: 15px;
padding: 15px 0 15px 0;
font-size: 12px;
}
#evm-forecast-indicators-container{ margin-top: 10px; }
#evm-forecast-indicators-container p{ padding: 0; }
.evm-bars-container{
width: 80px;
height: 100px;
}
#spi-pv-bar{ background-color: #0f75bc; }
#spi-ev-bar{ background-color: #8cc63f; }
#cpi-ev-bar{ background-color: #8cc63f; }
#cpi-ac-bar{ background-color: #fcb040; }
.bars{
width: 40px;
height: 100px;
float: left;
text-align: center;
position: relative;
}
.bars > p{
position: absolute;
width: 40px;
padding: 0;
margin: 0;
bottom: 0;
z-index: 100;
}
.bar{
position: absolute;
width: 40px;
bottom: 0;
z-index: 50;
}
/********** Sidebar Summary Box **********/
/*#evm-summary{ margin-bottom: 10px; }
#evm-forecast{ margin-top: 10px;}*/
.sidebar-boxes{
background: #3d454d;
overflow: auto;
padding: 10px;
color: white;
}
/********** Dropdown Box ********/
#baseline_id{ border-radius: 0; }
#baseline_id option{ color: gray; }
/*********** Charts ***********/
#evm-charts-wrapper{ margin: auto; }
#evm-main-chart{ padding: 40px 0 40px 0; }
/*Tooltip*/
#flot_chart_tooltip{
position: absolute;
display: none;
border: 1px solid #ddd;
padding: 2px;
background-color: #eee;
opacity: 0.80;
}
.markers{
padding: 2px;
position: absolute;
background-color: rgba(255, 255, 255, 0.7);
color: #d0d0d0;
font-size: smaller;
}
/********* Chart legend + Info ********/
#evm-legend-info{
border: lightgray 1px solid;
margin: 10px 10px 5px 10px;
}
#evm-legend-content{
margin: auto;
width: 900px;
overflow: auto;
}
#evm-legend-content div{
display: inline-block;
padding: 20px;
color: dimgray;
line-height: 5px;
}
#evm-legend-ev{
padding-top: 10px;
color: #8cc63f;
}
#evm-legend-ac{
padding-top: 20px;
color: #fcb040;
}
#evm-legend-pv{
padding-top: 20px;
color: #0f75bc ;
}
#evm-legend-forecast-container{
width: 410px;
margin: auto;
}
#evm-legend-forecast-container div{
display: inline-block;
padding: 20px;
color: dimgray;
line-height: 5px;
}
.evm-legend-lines{
margin: auto;
margin-bottom: 10px;
}
.evm-legend-lines > * {
display: inline;
margin-right: 5px;
}
<|start_filename|>anisotropic_layered.frag<|end_filename|>
//we only want to alter these passes
#if defined(MATERIAL_PASS_LIGHT) || defined(MATERIAL_PASS_VOXELIZATION)
#include "../state.frag"
#include "../other/lightParams.frag"
#include "BRDF/AnisotropicMicrofacetBRDF.frag"
#include "../other/customExtras.sh"
#define NB_LAYERS 2
//Input parameters
uniform vec3 uDielectric_eta; //name "dielectric_eta" min 0.0001 max 2.0 default 1.49, 1.49, 1.49
uniform vec3 uConductor_eta; //name "conductor_eta" min 0.0001 max 2.0 default 1.0, 1.0, 1.0
uniform vec3 uConductor_kappa; //name "conductor_kappa" min 0.0 max 10.0 default 1.0, 0.0, 0.0
uniform vec2 uDielectric_alpha; // name "dielectric_alpha" min 0.01 max 1.0 default 0.001 0.1
uniform vec2 uConductor_alpha; // name "conductor_alpha" min 0.01 max 1.0 default 0.001 0.1
uniform int uMaterial_set; // name "0:Layered 1:Dielectric, 2:Conductor" min 0 max 2 default 0
uniform int uSample_num; // name "sample_num" min 1 max 4096 default 1024
uniform float uDielectric_rotate; // name "Rotation_Dielectric" min -90.00 max 90.00 default 45.0
uniform float uConductor_rotate; // name "Rotation_Conductor" min -90.00 max 90.00 default -75.0
//math
//----begin----
float average (vec3 v) {return (v.x + v.y + v.z) / 3.0;}
bool isZero (vec3 v) {return (v.x==0.0 && v.y==0.0 && v.z==0.0) ? true : false;}
vec2 CalculateEigenValues( in mat2 m )
{
const float avg = ( m._11 + m._22 ) / 2.0; // Average eigenvalue.
const float det = max( determinant( m ), 0.0 ); // The determinant must be within [0, square of the average eigenvalue].
const float eigenvalueMax = avg + sqrt( max( avg * avg - det, 0.0 ) );
const float eigenvalueMin = min( det / eigenvalueMax, avg ); // To avoid the numerical error, we compute the minimum eigenvalue using the maximum eigenvalue.
return vec2( eigenvalueMax, eigenvalueMin );
}
// The input variable eigenvalue is assumed to be the maximum eigenvalue for the numerical stability.
// If it is not the maximum, the resulting eigenvector can have a large precision error.
// This implementation assumes m._12 = m._21.
vec2 CalculateEigenVectorMax( in mat2 m, in float eigenvalue )
{
return normalize( m._11 < m._22 ? vec2( m._12, eigenvalue - m._11 ) : vec2( eigenvalue - m._22, m._12 ) );
}
// The input variable eigenvalue is assumed to be the minimum eigenvalue for the numerical stability.
// If it is not the minimum, the resulting eigenvector can have a large precision error.
// This implementation assumes m._12 = m._21.
vec2 CalculateEigenVectorMin( in mat2 m, in float eigenvalue )
{
return normalize( m._11 > m._22 ? vec2( m._12, eigenvalue - m._11 ) : vec2( eigenvalue - m._22, m._12 ) );
}
float GetRandomNumber(in vec2 v)
{
return frac(sin(dot(v.xy, vec2(12.9898, 78.233))) * 43758.5453);
}
static vec2 randState;
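// Hash-based per-pixel RNG state; seeded from the screen coordinates before each sample loop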
float rand()
{
randState.x = GetRandomNumber(randState);
randState.y = GetRandomNumber(randState);
return randState.x;
}
vec3 Stretch( in vec3 direction, in vec2 roughness )
{
return vec3( direction.x * roughness.x, direction.y * roughness.y, direction.z );
}
mat2 inverse(in mat2 m)
{
return mat2(m._22, -m._12, -m._21, m._11) / determinant(m);
}
//----end----
/* Roughness to linear space conversions*/
//----begin----
#define USE_BEST_FIT
float roughnessToVariance(float a)
{
#ifdef USE_BEST_FIT
a = clamp(a, 0.0, 0.9999);
float a3 = pow(a, 1.1);
return a3 / (1.0 - a3);
#else
return a / (1.0-a);
#endif
}
float varianceToRoughness(float v)
{
#ifdef USE_BEST_FIT
return pow(v / (1.0 + v), 1.0/1.1);
#else
return v / (1.0+v);
#endif
}
vec2 roughnessToVariance(vec2 v)
{
#ifdef USE_BEST_FIT
vec2 vout = vec2(clamp(v.x, 0.0, 0.9999), clamp(v.y, 0.0, 0.9999));
vec2 v3 = vec2(pow(vout.x, 1.1), pow(vout.y, 1.1));
return v3 / (vec2(1.0, 1.0) - v3);
#else
return v / (vec2(1.0, 1.0) - v);
#endif
}
vec2 varianceToRoughness(vec2 v)
{
#ifdef USE_BEST_FIT
return vec2(pow(v.x / (1.0 + v.x), 1.0/1.1), pow(v.y / (1.0 + v.y), 1.0/1.1));
#else
return v / (vec2(1.0, 1.0) + v);
#endif
}
//----end----
vec3 fresnelConductorExact(float cosThetaI, vec3 eta, vec3 k) {
/*From Mitsuba(https://github.com/mitsuba-renderer/mitsuba/blob/1fd0f671dfcb77f813c0d6a36f2aa4e480b5ca8e/src/libcore/util.cpp) */
float cosThetaI2 = cosThetaI*cosThetaI,
sinThetaI2 = 1-cosThetaI2,
sinThetaI4 = sinThetaI2*sinThetaI2;
vec3 temp1 = eta*eta - k*k - sinThetaI2,
a2pb2 = sqrt(max(temp1*temp1 + k*k*eta*eta*4, 0.0)),
a = sqrt(max((a2pb2 + temp1) * 0.5f, 0.0));
vec3 term1 = a2pb2 + vec3(cosThetaI2, cosThetaI2, cosThetaI2),
term2 = a*(2*cosThetaI);
vec3 Rs2 = (term1 - term2) / (term1 + term2);
vec3 term3 = a2pb2*cosThetaI2 + vec3(sinThetaI4, sinThetaI4, sinThetaI4),
term4 = term2*sinThetaI2;
vec3 Rp2 = Rs2 * (term3 - term4) / (term3 + term4);
return 0.5 * (Rp2 + Rs2);
}
vec3 fresnelDielectricExt(float cosThetaI_, float eta)
{
/*From Mitsuba(https://github.com/mitsuba-renderer/mitsuba/blob/1fd0f671dfcb77f813c0d6a36f2aa4e480b5ca8e/src/libcore/util.cpp) */
float cosThetaT_;
if (eta == 1.0) {
cosThetaT_ = -cosThetaI_;
return 0.0f;
}
/* Using Snell's law, calculate the squared sine of the
angle between the normal and the transmitted ray */
float scale = (cosThetaI_ > 0) ? 1.0/eta : eta,
cosThetaTSqr = 1 - (1-cosThetaI_*cosThetaI_) * (scale*scale);
/* Check for total internal reflection */
if (cosThetaTSqr <= 0.0) {
cosThetaT_ = 0.0;
return 1.0;
}
/* Find the absolute cosines of the incident/transmitted rays */
float cosThetaI = abs(cosThetaI_);
float cosThetaT = sqrt(cosThetaTSqr);
float Rs = (cosThetaI - eta * cosThetaT)
/ (cosThetaI + eta * cosThetaT);
float Rp = (eta * cosThetaI - cosThetaT)
/ (eta * cosThetaI + cosThetaT);
cosThetaT_ = (cosThetaI_ > 0) ? -cosThetaT : cosThetaT;
/* No polarization -- return the unpolarized reflectance */
return 0.5 * (Rs * Rs + Rp * Rp);
}
/* Common Eval Fresnel function. Permits to switch between FGD and non-FGD
* evaluations of the Fresnel term.
*/
void evalFresnel(in float ct, in vec3 eta, in vec3 kappa,
out vec3 Rij, out vec3 Tij)
{
Rij = (isZero(kappa)) ? fresnelDielectricExt(ct, eta[0]) * vec3(1.0, 1.0, 1.0) : fresnelConductorExact(ct, eta, kappa);
Tij = (isZero(kappa)) ? vec3(1.0, 1.0, 1.0) - Rij : vec3(0.0, 0.0, 0.0);
}
// Evaluation of the NDF.
//----begin----
// For perfect specular surfaces, it returns zero.
float EvaluateNDF( in vec3 halfvector, in vec2 roughness )
{
vec3 H = vec3( halfvector.x / roughness.x, halfvector.y / roughness.y, halfvector.z );
float squaredLength = dot(H, H);
return ( halfvector.z > 0.0f ) ? ( 1.0 / ( max( M_PI * roughness.x * roughness.y, FLT_MIN ) * ( squaredLength * squaredLength ) ) ) : 0.0;
}
float EvaluatePDFOverNDF( in vec3 incomingDir, in vec2 roughness )
{
float zi = abs( incomingDir.z );
float incomingLength = length( Stretch( incomingDir, roughness ) );
// The Heaviside functions are omitted in this implementation.
// This is not a problem for the specular microfacet BRDF.
return 0.5 / ( zi + incomingLength );
}
// PDF of outgoing directions using VNDFs
float EvaluatePDF( in vec3 incomingDir, in vec3 halfvector, in vec2 roughness )
{
return EvaluateNDF( halfvector, roughness ) * EvaluatePDFOverNDF( incomingDir, roughness );
}
// VNDF importance sampling for the Smith microsurface model.
// [Heitz 2018 "Sampling the GGX Distribution of Visible Normals"].
vec3 SampleMicrofacetNormal( in vec3 direction, in vec2 randomNumbers, in vec2 roughness )
{
// assert( isfinite( direction.x ) && isfinite( direction.y ) && isfinite( direction.z ) );
// assert( isfinite( randomNumbers.x ) && isfinite( randomNumbers.y ) );
// Stretch and normalize the view direction
const vec3 stretchedDir = normalize( Stretch( direction, roughness ) );
// Sample a point on the half disk.
const float radius = sqrt( randomNumbers.x );
const float phi = 2*M_PI * randomNumbers.y;
const float x = radius * cos( phi );
const float t = radius * sin( phi );
const float s = 0.5 * ( 1.0 + stretchedDir.z );
const float y = lerp( sqrt( 1.0 - x * x ), t, s );
// Build an orthonormal basis.
const vec3 unnormalizedBasisX = { -stretchedDir.y, stretchedDir.x, 0.0 };
const float basisXLength = length( unnormalizedBasisX );
const vec3 basisX = basisXLength != 0.0 ? unnormalizedBasisX / basisXLength : vec3( 1.0, 0.0, 0.0 );
const vec3 basisY = cross( stretchedDir, basisX );
// Compute the microfacet normal in the stretched space.
// z must be equal to or greater than 0, so it is clamped to 0 to improve the numerical stability.
const float z = sqrt( max( 1.0 - x * x - y * y, 0.0 ) );
const vec3 pos = vec3( x, y, z);
//const vec3 normal = vec3( dot(pos, basisX), dot(pos, basisY), dot(pos, stretchedDir));
const vec3 normal = mul( pos, mat3(basisX, basisY, stretchedDir));
// Unstretch and normalize the sampled microfacet normal.
const vec3 result = normalize( Stretch( normal, roughness ) );
//assert( isfinite( result.x ) && isfinite( result.y ) && isfinite( result.z ) );
return result;
}
//----end----
//Computing Adding double
//Belcour 2018 (Efficient Rendering of Layered Materials using an Atomic Decomposition with Statistical Operators)
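//Outputs one reflectance lobe per layer: coeffs[i] holds the lobe's energy,
//variance_mat[i] its 2x2 anisotropic roughness-variance matrix in the tangent frame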
void computeAddingDoubling(in float _cti, in vec3 m_etas[NB_LAYERS+1], in vec3 m_kappas[NB_LAYERS+1], in vec2 m_alphas[NB_LAYERS], in mat2 m_rotate[NB_LAYERS],
out vec3 coeffs[NB_LAYERS], out mat2 variance_mat[NB_LAYERS])
{
//variables
float cti = _cti;
vec3 R0i = vec3(0.0, 0.0, 0.0), Ri0 = vec3(0.0, 0.0, 0.0), T0i = vec3(1.0, 1.0, 1.0), Ti0 = vec3(1.0, 1.0, 1.0);
float s_r0i = 0.0, s_ri0=0.0, s_t0i=0.0, s_ti0=0.0;
mat2 s_r0i_ = 0.0, s_ri0_=0.0, s_t0i_=0.0, s_ti0_=0.0;
float j0i=1.0, ji0=1.0;
//Iterate over the layers
for(int i=0; i<NB_LAYERS; ++i)
{
//Extract layer data
vec3 eta_1 = m_etas[i];
vec3 eta_2 = m_etas[i+1];
vec3 kappa_2 = m_kappas[i+1];
vec3 eta = eta_2 / eta_1;
vec3 kappa = kappa_2 / eta_1;
vec2 alpha = m_alphas[i];
mat2 rotate = m_rotate[i];
float n12 = average(eta);
vec3 R12 = vec3(0.0, 0.0, 0.0), T12 = vec3(0.0, 0.0, 0.0), R21 = vec3(0.0, 0.0, 0.0), T21 = vec3(0.0, 0.0, 0.0);
float j12=1.0, j21=1.0, ctt = 0.0;
mat2 s_r12_ = mat2(0.0, 0.0, 0.0, 0.0), s_r21_=mat2(0.0, 0.0, 0.0, 0.0), s_t12_=mat2(0.0, 0.0, 0.0, 0.0), s_t21_=mat2(0.0, 0.0, 0.0, 0.0);
//Evaluate off-specular transmission
float sti = sqrt(1.0f - cti*cti);
float stt = sti / n12;
if(stt <= 1.0f) {
//const float scale = _clamp<float>((1.0f-alpha)*(sqrt(1.0f-alpha) + alpha), 0.0f, 1.0f);
//stt = scale*stt + (1.0f-scale)*sti;
ctt = sqrt(1.0f - stt*stt);
} else {
ctt = -1.0f;
}
/* Ray is not block by conducting interface or total reflection */
const bool has_transmissive = ctt > 0.0f && isZero(kappa);
/* Evaluate interface variance term */
vec2 s_r12 = roughnessToVariance(alpha);
s_r12_ = mul(mul(rotate, mat2(s_r12.x, 0.0,0.0, s_r12.y)) , transpose(rotate));
vec2 s_r21 = s_r12;
s_r21_ = s_r12_;
/* For dielectric interfaces, evaluate the transmissive roughnesses */
if(has_transmissive) {
const float _ctt = 1.0; // The scaling factor overblurs the BSDF at grazing
const float _cti = 1.0; // angles (we cannot account for the deformation of
// the lobe for those configurations).
vec2 s_t12 = roughnessToVariance(alpha * 0.5 * abs(_ctt*n12 - _cti)/(_ctt*n12));
s_t12_ = mul(mul(rotate, mat2(s_t12.x, 0.0,0.0, s_t12.y)), transpose(rotate));
vec2 s_t21 = roughnessToVariance(alpha * 0.5 * abs(_cti/n12 - _ctt)/(_cti/n12));
s_t21_ = mul(mul(rotate, mat2(s_t21.x, 0.0,0.0, s_t21.y)), transpose(rotate));
j12 = (ctt/cti) * n12; // Scale due to the interface
j21 = (cti/ctt) / n12;
}
/* Evaluate r12, r21, t12, t21 */
evalFresnel(cti, eta, kappa, R12, T12);
if(has_transmissive) {
R21 = R12;
T21 = T12 /* (n12*n12) */; // We don't need the IOR scaling since we are
T12 = T12 /* (n12*n12) */; // computing reflectance only here.
} else {
R21 = 0.0;
T21 = 0.0;
T12 = 0.0;
}
/* Multiple scattering forms */
const vec3 denom = (1.0 - Ri0*R12);
const vec3 m_R0i = (average(denom) <= 0.0)? 0.0 : (T0i*R12*Ti0) / denom;
const vec3 m_Ri0 = (average(denom) <= 0.0)? 0.0 : (T21*Ri0*T12) / denom;
const vec3 m_Rr = (average(denom) <= 0.0)? 0.0 : (Ri0*R12) / denom;
/* Evaluate the adding operator on the energy */
const vec3 e_R0i = R0i + m_R0i;
const vec3 e_T0i = (T0i*T12) / denom;
const vec3 e_Ri0 = R21 + m_Ri0;
const vec3 e_Ti0 = (T21*Ti0) / denom;
/* Scalar forms for the spectral quantities */
const float r0i = average(R0i);
const float e_r0i = average(e_R0i);
const float e_ri0 = average(e_Ri0);
const float m_r0i = average(m_R0i);
const float m_ri0 = average(m_Ri0);
const float m_rr = average(m_Rr);
const float r21 = average(R21);
/* Evaluate the adding operator on the normalized variance */
mat2 _s_r0i_ = (r0i*s_r0i_ + m_r0i*(s_ti0_ + j0i*(s_t0i_ + s_r12_ + m_rr*(s_r12_+s_ri0_)))) ;// e_r0i;
mat2 _s_t0i_ = j12*s_t0i_ + s_t12_ + j12*(s_r12_ + s_ri0_)*m_rr;
mat2 _s_ri0_ = (r21*s_r21_ + m_ri0*(s_t12_ + j12*(s_t21_ + s_ri0_ + m_rr*(s_r12_+s_ri0_)))) ;// e_ri0;
mat2 _s_ti0_ = ji0*s_t21_ + s_ti0_ + ji0*(s_r12_ + s_ri0_)*m_rr;
_s_r0i_ = (e_r0i > 0.0) ? _s_r0i_/e_r0i : 0.0;
_s_ri0_ = (e_ri0 > 0.0) ? _s_ri0_/e_ri0 : 0.0;
/* Store the coefficient and variance */
if(m_r0i > 0.0) {
coeffs[i] = m_R0i;
variance_mat[i] = s_ti0_ + j0i*(s_t0i_ + s_r12_ + m_rr*(s_r12_+s_ri0_));
} else {
coeffs[i] = 0.0;
variance_mat[i] = 0.0;
}
/* Update energy */
R0i = e_R0i;
T0i = e_T0i;
Ri0 = e_Ri0;
Ti0 = e_Ti0;
/* Update mean */
cti = ctt;
/* Update variance */
s_r0i_ = _s_r0i_;
s_t0i_ = _s_t0i_;
s_ri0_ = _s_ri0_;
s_ti0_ = _s_ti0_;
/* Update jacobian */
j0i *= j12;
ji0 *= j21;
/* Escape if a conductor is present */
if(average(kappa) > 0.0) {
return;
}
}
}
//Entry point
//For env light
void Anisotropic_layered_env( inout FragmentState s)
{
//material parameters
//1st:air, 2nd:dielectric, 3rd:conductor
//or
//1st:dielectric, 2nd:conductor
vec3 m_etas[NB_LAYERS+1] = {vec3(1.0, 1.0, 1.0), uDielectric_eta, uConductor_eta};
vec3 m_kappas[NB_LAYERS+1] = {vec3(0.0, 0.0, 0.0), vec3(0.0, 0.0, 0.0), uConductor_kappa};
vec2 m_alphas[NB_LAYERS] = {uDielectric_alpha , uConductor_alpha};
float m_thetas[NB_LAYERS] = { 2.0*M_PI * (uDielectric_rotate/360.0), 2.0*M_PI * (uConductor_rotate/360.0) };
mat2 m_rotate[NB_LAYERS] = { mat2( cos(m_thetas[0]), -sin(m_thetas[0]), sin(m_thetas[0]), cos(m_thetas[0]) ),
mat2( cos(m_thetas[1]), -sin(m_thetas[1]), sin(m_thetas[1]), cos(m_thetas[1]) ) };
//We use two kinds of coordinates.
//The one is "Basis Coordinate" (basisX, basiY, basisZ)
//The other is "Local Coordinate" (u, v, basisz)
//Basis Cordinate
vec3 basisX = s.vertexTangent;
vec3 basisY = cross( basisX, s.normal );
vec3 basisZ = s.normal;
vec3 E_base = normalize(vec3(dot(s.vertexEye, basisX), dot(s.vertexEye, basisY), dot(s.vertexEye, basisZ)));
if(uMaterial_set == 0)
{
//Layered Material
//evaluate the adding method to get coeffs and variances
vec3 coeffs[NB_LAYERS] = {vec3(0.0, 0.0, 0.0), vec3(0.0, 0.0, 0.0)};
mat2 variance_mat_base[NB_LAYERS] = {mat2(0.0,0.0,0.0,0.0), mat2(0.0,0.0,0.0,0.0)};
computeAddingDoubling(E_base.z, m_etas, m_kappas, m_alphas, m_rotate,
coeffs, variance_mat_base);
vec2 eigenVec_max[NB_LAYERS];
vec2 eigenVec_min[NB_LAYERS];
vec2 alphas[NB_LAYERS];
vec3 us[NB_LAYERS];
vec3 vs[NB_LAYERS];
vec3 E_locals[NB_LAYERS];
//calculate eigen values and vecs
for(int i=0; i<NB_LAYERS; ++i) {
vec2 eigenVals = CalculateEigenValues(variance_mat_base[i]);
eigenVec_max[i] = CalculateEigenVectorMax(variance_mat_base[i], eigenVals.x);
eigenVec_min[i] = CalculateEigenVectorMin(variance_mat_base[i], eigenVals.y);
us[i] = eigenVec_max[i].x * basisX + eigenVec_max[i].y * basisY;
vs[i] = eigenVec_min[i].x * basisX + eigenVec_min[i].y * basisY;
alphas[i] = vec2(varianceToRoughness(eigenVals.x), varianceToRoughness(eigenVals.y));
E_locals[i] = normalize(vec3(dot(s.vertexEye, us[i]), dot(s.vertexEye, vs[i]), dot(s.vertexEye, basisZ)));
}
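//Monte Carlo estimate of the environment lighting: for each lobe,
//importance-sample the visible-normal distribution and accumulate
//BRDF * coeff * cos(theta) / pdf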
for(int j=0; j<uSample_num; j++)
{
for(int index=0; index<NB_LAYERS; index++)
{
//generate random numbers
float r_x = 0.5 * s.screenTexCoord.x + 0.5 * float(j) / float(uSample_num);
float r_y = 0.5 * s.screenTexCoord.y + 0.5 * float(j) / float(uSample_num);
randState.xy = vec2(r_x, r_y);
vec2 rand2 = vec2(rand(), rand());
vec3 H_local = SampleMicrofacetNormal( E_locals[index], rand2, alphas[index] );
vec3 H = vec3( dot(vec3( us[index].x, vs[index].x, basisZ.x), H_local),
dot(vec3( us[index].y, vs[index].y, basisZ.y), H_local),
dot(vec3( us[index].z, vs[index].z, basisZ.z), H_local));
vec3 L = (2.0 * dot( s.vertexEye, H )) * H - s.vertexEye;
vec3 L_local = (2.0 * dot( E_locals[index], H_local )) * H_local - E_locals[index];
if (E_locals[index].z * L_local.z <= 0.0) {
continue;
}
vec3 sampleCol = textureCubeLod( tReflectionCubeMap, L, 0.0).xyz;
vec3 R = vec3(0.0, 0.0, 0.0);
float pdf = EvaluatePDF(L_local, H_local, alphas[index]);
R = AnisotropicMicrofacetBRDF(L_local, E_locals[index], alphas[index]) * coeffs[index] * max(L_local.z, 0.0) * sampleCol;
s.specularLight += R / pdf;
}
}
} else
{
//Dielectric or Conductor
mat2 rotate = m_rotate[uMaterial_set-1];
vec2 roughness = vec2(m_alphas[uMaterial_set-1].x, m_alphas[uMaterial_set-1].y);
vec3 u = basisX*rotate._11 + (1.0-rotate._11)*dot(basisX, basisZ)*basisZ + cross(basisZ, basisX)*rotate._12;
vec3 v = cross( u, s.normal );
vec3 E_local = normalize(vec3(dot(s.vertexEye, u), dot(s.vertexEye, v), dot(s.vertexEye, basisZ)));
for(int i=0; i<uSample_num; i++)
{
float r_x = 0.5 * s.screenTexCoord.x + 0.5 * float(i) / float(uSample_num);
float r_y = 0.5 * s.screenTexCoord.y + 0.5 * float(i) / float(uSample_num);
randState.xy = vec2(r_x, r_y);
vec2 rand2 = vec2(rand(), rand());
vec3 H_local = SampleMicrofacetNormal( E_local, rand2, roughness);
vec3 H = vec3( dot(vec3(u.x, v.x, basisZ.x), H_local),
dot(vec3(u.y, v.y, basisZ.y), H_local),
dot(vec3(u.z, v.z, basisZ.z), H_local));
vec3 L = (2.0 * dot( s.vertexEye, H )) * H - s.vertexEye;
vec3 L_local = (2.0 * dot( E_local, H_local )) * H_local - E_local;
vec3 L_base = normalize(vec3(dot(L, basisX), dot(L, basisY), dot(L, basisZ)));
float pdf = EvaluatePDF(L_local, H_local, roughness);
float lod = 0.0;
//fresnel
vec3 F = uMaterial_set == 1 ? fresnelDielectricExt(E_local.z, m_etas[uMaterial_set]) : fresnelConductorExact(E_local.z, m_etas[uMaterial_set], m_kappas[uMaterial_set]);
//final
vec3 sampleCol = textureCubeLod( tReflectionCubeMap, L, lod).xyz;
s.specularLight += AnisotropicMicrofacetBRDF(L_local, E_local, roughness) * F / pdf * max(L_local.z, 0.0) * sampleCol;
}
}
s.specularLight /= uSample_num;
}
#ifdef ReflectionEnv
#undef ReflectionEnv
#endif
#define ReflectionEnv Anisotropic_layered_env
#endif //passes
<|start_filename|>BRDF/AnisotropicMicrofacetBRDF.frag<|end_filename|>
#ifndef ANISOTROPIC_MICROFACET_BRDF_frag
#define ANISOTROPIC_MICROFACET_BRDF_frag
#include "NumericConstants.frag"
#include "MathConstants.frag"
// Microfacet BRDF using the GGX NDF with the Smith height-correlated masking and shadowing function
// [<NAME>, "Understanding the Masking-Shadowing Function in Microfacet-Based BRDFs" (2014)]
// Axis-aligned Anisotropic NDF
float GGX( in vec3 halfvector, in vec2 roughness )
{
const vec3 stretchedHalfvector = vec3( halfvector.x / roughness.x, halfvector.y / roughness.y, halfvector.z );
const float stretchedSquaredLength = dot( stretchedHalfvector, stretchedHalfvector );
return 1.0 / ( M_PI * ( roughness.x * roughness.y ) * ( stretchedSquaredLength * stretchedSquaredLength ) );
}
// Axis-aligned Anisotropic BRDF
float AnisotropicMicrofacetBRDF( in vec3 incomingDir, in vec3 outgoingDir, in vec2 roughness )
{
const vec3 halfvector = normalize( incomingDir + outgoingDir );
const float zi = abs( incomingDir.z );
const float zo = abs( outgoingDir.z );
const float stretchedIncomingLength = length( vec3( incomingDir.x * roughness.x, incomingDir.y * roughness.y, incomingDir.z ) );
const float stretchedOutgoingLength = length( vec3( outgoingDir.x * roughness.x, outgoingDir.y * roughness.y, outgoingDir.z ) );
return min( GGX( halfvector, roughness ) / ( 2.0 * ( zo * stretchedIncomingLength + zi * stretchedOutgoingLength ) ), FLT_MAX );
}
#endif
<|start_filename|>BRDF/NumericConstants.frag<|end_filename|>
#ifndef NUMERIC_CONSTANTS_frag
#define NUMERIC_CONSTANTS_frag
#define FLT_MANT_BITS ( 23 )
#define FLT_MIN ( 1.175494351e-38f )
#define FLT_MAX ( 3.402823466e+38f )
#define FLT_MAX_EXP ( 128 )
#define FLT_MIN_EXP ( -125 )
#define FLT_EPSILON ( 1.192092896e-07f )
#define HLF_MANT_BITS ( 10 )
#define HLF_MIN ( 1.0 / ( 1 << 14 ) )
#define HLF_MAX ( 65504.0 )
#define RGBE_MAX_EXP ( 16 )
#define RGBE_EXP_OFFSET ( 15 )
#define RGBE_MANT_BITS ( 9 )
#define RGBE_EXP_BITS ( 5 )
#define RGBE_MANT_RANGE ( 1 << RGBE_MANT_BITS )
#define RGBE_EXP_RANGE ( 1 << RGBE_EXP_BITS )
#define RGBE_MIN_NORMALIZED ( 1.0 / ( 1 << RGBE_EXP_OFFSET ) )
#endif
<|start_filename|>BRDF/MathConstants.frag<|end_filename|>
#ifndef MATH_CONSTANTS_frag
#define MATH_CONSTANTS_frag
#define M_PI ( 3.1415926535897932384626433832795 )
#endif
| tomoya5296/anisotropic_layered_material |
<|start_filename|>attic/demo/arduino-due/due-#9000-st7789/main.cpp<|end_filename|>
#include "hwlib.hpp"
namespace target = hwlib::target;
int main( void ){
// wait for the terminal emulator to start up
hwlib::wait_ms( 2'000 );
hwlib::cout << "ST7789 demo\n" << hwlib::flush;
auto _sclk = hwlib::target::pin_out{ hwlib::target::pins::d5 };
auto sclk = hwlib::invert( _sclk );
auto mosi = hwlib::target::pin_out{ hwlib::target::pins::d4 };
auto spi = hwlib::spi_bus_bit_banged_sclk_mosi_miso{
sclk, mosi, hwlib::pin_in_dummy };
auto dc = hwlib::target::pin_out{ hwlib::target::pins::d2 };
auto & cs = hwlib::pin_out_dummy;
//auto blk = hwlib::target::pin_out{ hwlib::target::pins::d7 };
auto rst = hwlib::target::pin_out{ hwlib::target::pins::d3 };
//blk.write( 1 );blk.flush();
auto display = hwlib::st7789_spi_dc_cs_rst( spi, dc, cs, rst );
for(;;){
display.clear( hwlib::red );
display.flush();
display.clear( hwlib::green );
display.flush();
display.clear( hwlib::blue );
display.flush();
}
}
<|start_filename|>attic/demo/arduino-uno/uno-#0100-pcfa8574a-lcd-16x2/main.cpp<|end_filename|>
// ==========================================================================
//
// Hello world on an Arduino Uno
//
// (c) <NAME> (<EMAIL>) 2017
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// ==========================================================================
#include "hwlib.hpp"
namespace target = hwlib::target;
int main( void ){
auto scl = hwlib::target::pin_oc( hwlib::target::pins::scl );
auto sda = hwlib::target::pin_oc( hwlib::target::pins::sda );
auto i2c_bus = hwlib::i2c_bus_bit_banged_scl_sda{ scl,sda };
auto chip = hwlib::pcf8574a{ i2c_bus, 0x27 };
auto d4 = hwlib::pin_out_from( chip.p4 );
auto d5 = hwlib::pin_out_from( chip.p5 );
auto d6 = hwlib::pin_out_from( chip.p6 );
auto d7 = hwlib::pin_out_from( chip.p7 );
auto d = hwlib::port_out_from( d4, d5, d6, d7 );
auto rs = hwlib::pin_out_from( chip.p0 );
auto rw = hwlib::pin_out_from( chip.p1 );
auto e = hwlib::pin_out_from( chip.p2 );
rw.write( 0 );rw.flush();
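// note: this is the usual wiring of a PCF8574(A) LCD backpack:
// p0 = RS, p1 = R/W, p2 = E, p4..p7 = D4..D7 (p3 often switches the backlight)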
auto lcd = hwlib::hd44780( rs, e, d, hwlib::xy( 16, 2 ) );
lcd
<< "\fHello world!"
//<< "\n=== 2nd line ==="
<< hwlib::flush;
}
<|start_filename|>library/targets/hwlib-atmega328.hpp<|end_filename|>
// ==========================================================================
//
// File : hwlib-atmega328.hpp
// Part of : C++ hwlib library for close-to-the-hardware OO programming
// Copyright : <EMAIL> 2017-2019
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// ==========================================================================
// this file contains Doxygen lines
/// @file
#ifndef HWLIB_ATMEGA328_H
#define HWLIB_ATMEGA328_H
#define _HWLIB_TARGET_WAIT_US_BUSY
#include HWLIB_INCLUDE( ../hwlib-all.hpp )
#include <stdint.h>
#include "avr/io.h"
extern "C" void HWLIB_WEAK __cxa_pure_virtual(){
for(;;){
// This is for the side-effect only:
// a never-ending loop without side-effect is UB.
(void)PORTB;
}
}
namespace atmega328 {
volatile uint8_t & HWLIB_WEAK port_data( uint_fast8_t port ){
switch( port ){
// case 0 : return PORTA;
case 1 : return PORTB;
case 2 : return PORTC;
case 3 : return PORTD;
default : break;
}
HWLIB_PANIC_WITH_LOCATION;
// doesn't return
}
volatile uint8_t & HWLIB_WEAK port_input( uint_fast8_t port ){
switch( port ){
// case 0 : return PINA;
case 1 : return PINB;
case 2 : return PINC;
case 3 : return PIND;
default : break;
}
HWLIB_PANIC_WITH_LOCATION;
// doesn't return
}
volatile uint8_t & HWLIB_WEAK port_direction( uint_fast8_t port ){
switch( port ){
// case 0 : return DDRA;
case 1 : return DDRB;
case 2 : return DDRC;
case 3 : return DDRD;
default : break;
}
HWLIB_PANIC_WITH_LOCATION;
// doesn't return
}
void HWLIB_WEAK configure_as_gpio( uint_fast8_t port, uint_fast8_t pin ){
if( port == 3 ){
if( pin == 0 ) {
UCSR0B = UCSR0B & ~ 0x10; // disable UART receive
}
if( pin == 1 ){
UCSR0B = UCSR0B & ~ 0x08; // disable UART transmit
}
}
}
class pin_adc : public hwlib::adc {
private:
uint_fast8_t pin;
public:
pin_adc( uint_fast8_t pin ):
adc( 10 ),
pin( pin )
{
// reference is AVCC
ADMUX = 0x01 << REFS0;
// enable the ADC, prescaler = clock / 128
ADCSRA = 7 | ( 0x01 << ADEN );
}
uint_fast32_t read() override {
// select the ADC input pin
ADMUX = ( 0x01 << REFS0 ) | pin;
// start the conversion.
ADCSRA = ADCSRA | ( 0x01 << ADSC );
// wait for the conversion to finish
while ( (ADCSRA & ( 0x01 << ADSC )) != 0 ){}
return ADCW;
}
void refresh() override {}
};
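// usage sketch (not part of the library):
//
// auto adc = atmega328::pin_adc( 0 ); // ADC channel 0
// auto value = adc.read(); // 10-bit conversion result: 0..1023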
/// pin_in implementation for an ATMega328P
class pin_in : public hwlib::pin_in {
private:
volatile uint8_t & port_out;
volatile uint8_t & port_in;
uint8_t mask;
void write( bool v ){
if( v ){
port_out = port_out | mask;
} else {
port_out = port_out & ~mask;
}
}
public:
/// pin_in constructor from ATMega328P port/pin numbers
///
/// This call creates a pin_in from an ATMega328P port/pin
/// number pair.
///
/// This constructor sets the pin direction to input.
pin_in( uint8_t port_number, uint8_t pin_number ):
port_out{ port_data( port_number ) },
port_in{ port_input( port_number ) },
mask( 0x1 << pin_number )
{
configure_as_gpio( port_number, pin_number );
port_direction( port_number ) = port_direction( port_number ) & ~mask;
}
bool read() override {
return ( port_in & mask ) != 0;
}
void refresh() override {}
void pullup_enable(){
write( 1 );
}
void pullup_disable(){
write( 0 );
}
};
/// pin_out implementation for an ATMega328P
class pin_out : public hwlib::pin_out {
private:
volatile uint8_t & port_out;
uint8_t mask;
public:
/// pin_out constructor from ATMega328P port/pin numbers
///
/// This call creates a pin_out from an ATMega328P port/pin
/// number pair.
///
/// This constructor sets the pin direction to output.
pin_out( uint8_t port_number, uint8_t pin_number ):
port_out{ port_data( port_number ) },
mask( 0x1 << pin_number )
{
configure_as_gpio( port_number, pin_number );
port_direction( port_number ) = port_direction( port_number ) | mask;
}
void write( bool v ) override {
if( v ){
port_out = port_out | mask;
} else {
port_out = port_out & ~mask;
}
}
void flush() override {}
};
/// pin_in_out implementation for an ATMega328P
class pin_in_out : public hwlib::pin_in_out {
private:
volatile uint8_t & port_in;
volatile uint8_t & port_out;
uint8_t port_number;
uint8_t mask;
public:
/// pin_in_out constructor
///
/// Constructor for an ATMega328P input/output pin.
///
/// This constructor doesn't set the pin direction
/// to input or output, a direction_set function must
/// be called to do so.
pin_in_out( uint8_t port_number, uint8_t pin_number ):
port_in{ port_input( port_number ) },
port_out{ port_data( port_number ) },
port_number( port_number ),
mask( 0x1 << pin_number )
{
configure_as_gpio( port_number, pin_number );
}
virtual void direction_set_input() override {
port_direction( port_number ) = port_direction( port_number ) & ~ mask;
}
bool read() override {
return ( port_in & mask ) != 0;
}
virtual void direction_set_output() override {
port_direction( port_number ) = port_direction( port_number ) | mask;
}
void write( bool v ) override {
if( v ){
port_out = port_out | mask;
} else {
port_out = port_out & ~mask;
}
}
void refresh() override {}
void flush() override {}
void direction_flush() override {}
void pullup_enable(){
write( 1 );
}
void pullup_disable(){
write( 0 );
}
};
/// pin_oc implementation for an ATMega328P
class pin_oc : public hwlib::pin_oc {
private:
volatile uint8_t & port_in;
volatile uint8_t & port_out;
uint8_t port_number;
uint8_t mask;
public:
/// pin_oc constructor
///
/// The port_number and pin_number refer to the chip,
/// not to the Arduino board pin names.
///
/// This constructor sets the pin to high (high-impedance).
pin_oc( uint8_t port_number, uint8_t pin_number ):
port_in{ port_input( port_number ) },
port_out{ port_data( port_number ) },
port_number( port_number ),
mask( 0x1 << pin_number )
{
configure_as_gpio( port_number, pin_number );
}
bool read() override {
return ( port_in & mask ) != 0;
}
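// open-collector emulation: writing 1 makes the pin a (high-impedance)
// input, writing 0 makes it an output that drives low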
void write( bool v ) override {
if( v ){
port_direction( port_number ) = port_direction( port_number ) & ~ mask;
} else {
port_direction( port_number ) = port_direction( port_number ) | mask;
port_out = port_out & ~mask;
}
}
void refresh() override {}
void flush() override {}
};
bool uart_char_available();
char uart_getc();
void uart_putc( char c );
#ifdef _HWLIB_ONCE
void uart_init(){
static bool init_done = false;
if( init_done ){
return;
}
init_done = true;
// set baudrate
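// worked example: a 16 MHz clock and 9600 baud gives
// UBRR = 16'000'000 / ( 9600 * 16 ) - 1 = 103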
uint64_t UBRR_VALUE = ((( 16'000'000 / ( BMPTK_BAUDRATE * 16UL ))) - 1 );
UBRR0H = (uint8_t) ( UBRR_VALUE >> 8 );
UBRR0L = (uint8_t) UBRR_VALUE;
// format : 8 data bits, no parity, 1 stop bit
UCSR0C = 0x06;
// enable rx and tx
UCSR0B = (1<<RXEN0)|(1<<TXEN0);
}
bool uart_char_available(){
uart_init();
return ( UCSR0A & ( 0x01<<RXC0 ));
}
char uart_getc(){
// uart_init() is not needed because uart_char_available does that
while( ! uart_char_available() ){
hwlib::background::do_background_work();
}
return UDR0;
}
void uart_putc( char c ){
uart_init();
while( !( UCSR0A & ( 0x01 << UDRE0 ))){
hwlib::background::do_background_work();
}
UDR0 = c;
}
#endif // _HWLIB_ONCE
}; // namespace atmega328
namespace hwlib {
const auto target_chip = target_chips::atmega328p;
#ifdef _HWLIB_ONCE
void HWLIB_WEAK wait_us_asm( int n ){
// first int parameter is passed in r24/r25
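// each pass through the loop below burns 16 clock cycles, i.e. 1 us
// at the 16 MHz CPU clock (see the per-instruction cycle counts)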
__asm volatile( // clocks
"1: cp r1, r24 \t\n" // 1
" cpc r1, r25 \t\n" // 1
" brge 3f \t\n" // 1
" rcall 3f \t\n" // 7
" rjmp 2f \t\n" // 2
"2: sbiw r24, 0x01 \t\n" // 2
" rjmp 1b \t\n" // 2
"3: \t\n" // 16 total
: : "r" ( n ) // uses (reads) n
);
}
/*
void HWLIB_WEAK wait_us_busy( int_fast32_t n ){
while( n > 0 ){
if( n < 10'000 ){
wait_us_asm( n );
return;
}
wait_us_asm( 1'000 );
n -= 1'000;
}
}
*/
/// the number of ticks per us
uint_fast64_t ticks_per_us(){
return 16;
}
//static inline uint16_t last_low = 0;
//static inline uint_fast64_t high = 0;
static uint16_t last_low = 0;
static uint_fast64_t high = 0;
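// note: now_ticks() must be called at least once per timer wrap-around
// (65536 ticks, about 4.1 ms at 16 MHz), otherwise a wrap is missed and
// the returned value would jump backwards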
uint_fast64_t now_ticks(){
static bool init_done = false;
if( ! init_done ){
// set up timer 1 without prescaler (input=CPU clock)
TCCR1B = 0x01;
init_done = true;
}
uint16_t low = TCNT1L;
low |= ( TCNT1H << 8 );
if( low < last_low ){
// the timer rolled over, so increment the high part
high += 0x1ULL << 16;
}
last_low = low;
return ( low | high );
}
uint64_t now_us(){
return now_ticks() / ticks_per_us();
}
// busy waits
void wait_ns_busy( int_fast32_t n ){
wait_us_busy( ( n + 999 ) / 1000 );
}
void wait_us_busy( int_fast32_t n ){
auto end = now_us() + n;
while( now_us() < end ){}
}
void wait_ms_busy( int_fast32_t n ){
while( n > 0 ){
wait_us_busy( 1000 );
--n;
}
}
// non-busy waits
void HWLIB_WEAK wait_ns( int_fast32_t n ){
wait_us( ( n + 999 ) / 1000 );
}
void HWLIB_WEAK wait_us( int_fast32_t n ){
auto end = now_us() + n;
while( now_us() < end ){
background::do_background_work();
}
}
void HWLIB_WEAK wait_ms( int_fast32_t n ){
while( n > 0 ){
wait_us( 1000 );
--n;
}
}
#define HWLIB_USE_HW_UART
#ifdef HWLIB_USE_HW_UART
void HWLIB_WEAK uart_putc( char c ){
atmega328::uart_putc( c );
}
bool HWLIB_WEAK uart_char_available(){
return atmega328::uart_char_available();
}
char HWLIB_WEAK uart_getc( ){
return atmega328::uart_getc();
}
#else
void HWLIB_WEAK uart_putc( char c ){
static atmega328::pin_out pin( 3, 1 );
uart_putc_bit_banged_pin( c, pin );
}
char HWLIB_WEAK uart_getc(){
static atmega328::pin_in pin( 1, 6 );
return uart_getc_bit_banged_pin( pin );
}
#endif
#endif
}; //namespace hwlib
#endif // HWLIB_ATMEGA328_H
<|start_filename|>library/shields/hwlib-arduino-multifunction-shield.hpp<|end_filename|>
// ==========================================================================
//
// File : hwlib-arduino-multifunction-shield.hpp
// Part of : C++ hwlib library for close-to-the-hardware OO programming
// Copyright : <EMAIL> 2021
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// ==========================================================================
// included only via hwlib.hpp, hence no multiple-include guard is needed
// https://www.robkalmeijer.nl/techniek/computer/arduino/uno/mfs/index.html
// this file contains Doxygen lines
/// @file
#ifdef HWLIB_TARGET_arduino_uno
namespace hwlib {
class multifunction_shield {
target::pin_out led_1_inv, led_2_inv, led_3_inv, led_4_inv;
target::pin_in switch_1_inv, switch_2_inv, switch_3_inv;
target::pin_out beeper_inv;
public:
hwlib::pin_invert_from_out_t led_1, led_2, led_3, led_4;
hwlib::port_out_from_pins_t leds;
hwlib::pin_invert_from_in_t switch_1, switch_2, switch_3;
hwlib::port_in_from_pins_t switches;
hwlib::pin_invert_from_out_t beeper;
multifunction_shield():
led_1_inv( target::pins::d13 ),
led_2_inv( target::pins::d12 ),
led_3_inv( target::pins::d11 ),
led_4_inv( target::pins::d10 ),
switch_1_inv( target::pins::a1 ),
switch_2_inv( target::pins::a2 ),
switch_3_inv( target::pins::a3 ),
beeper_inv( target::pins::d3 ),
led_1( led_1_inv ),
led_2( led_2_inv ),
led_3( led_3_inv ),
led_4( led_4_inv ),
leds( led_1, led_2, led_3, led_4 ),
switch_1( switch_1_inv ),
switch_2( switch_2_inv ),
switch_3( switch_3_inv ),
switches( switch_1, switch_2, switch_3 ),
beeper( beeper_inv )
{
led_1.write( 0 ); led_1.flush();
led_2.write( 0 ); led_2.flush();
led_3.write( 0 ); led_3.flush();
led_4.write( 0 ); led_4.flush();
beeper.write( 0 ); beeper.flush();
}
// missing:
// 7-segment displays - needs library support
// potentiometer A/D input
// LM35
// DS1820
// IR receiver
// APC220
}; // class multifunction_shield
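// usage sketch (not part of the library):
//
// hwlib::multifunction_shield shield;
// shield.led_1.write( 1 ); shield.led_1.flush(); // LED D1 on
// bool pressed = shield.switch_1.read(); // 1 = switch S1 pressed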
}; // namespace hwlib
#endif
<|start_filename|>library/targets/hwlib-termite.hpp<|end_filename|>
// ==========================================================================
//
// File : hwlib-termite.hpp
// Part of : C++ hwlib library for close-to-the-hardware OO programming
// Copyright : <EMAIL> 2017
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// ==========================================================================
// this file contains Doxygen lines
/// @file
#ifndef HWLIB_TERMITE_H
#define HWLIB_TERMITE_H
#include HWLIB_INCLUDE(../hwlib-all.hpp)
namespace stm32f1xx {
/// GPIO pin names
enum class pins {
a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15,
b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15,
c0, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15,
led_red,led_green,led_blue,i2c_scl,i2c_sda,eeprom_scl,eeprom_sda,
spi_cs,spi_sclk,spi_miso,spi_mosi,uart_tx,uart_rx,
/// \cond INTERNAL
SIZE_THIS_IS_NOT_A_PIN
/// \endcond
};
/// \cond INTERNAL
struct pin_info_type {
uint8_t port;
uint8_t pin;
};
const HWLIB_WEAK pin_info_type &pin_info( pins name ) {
static const pin_info_type pin_info_array[ (int) pins::SIZE_THIS_IS_NOT_A_PIN ] = {
{ 0, 0 }, // a0
{ 0, 1 }, // a1
{ 0, 2 }, // a2
{ 0, 3 }, // a3
{ 0, 4 }, // a4
{ 0, 5 }, // a5
{ 0, 6 }, // a6
{ 0, 7 }, // a7
{ 0, 8 }, // a8
{ 0, 9 }, // a9
{ 0, 10 }, // a10
{ 0, 11 }, // a11 - USB
{ 0, 12 }, // a12 - USB
{ 0, 13 }, // a13 - SWD
{ 0, 14 }, // a14 - SWD
{ 0, 15 }, // a15
{ 1, 0 }, // b0
{ 1, 1 }, // b1
{ 1, 2 }, // b2
{ 1, 3 }, // b3
{ 1, 4 }, // b4
{ 1, 5 }, // b5
{ 1, 6 }, // b6
{ 1, 7 }, // b7
{ 1, 8 }, // b8
{ 1, 9 }, // b9
{ 1, 10 }, // b10
{ 1, 11 }, // b11
{ 1, 12 }, // b12
{ 1, 13 }, // b13
{ 1, 14 }, // b14
{ 1, 15 }, // b15
{ 2, 0 }, // c0
{ 2, 1 }, // c1
{ 2, 2 }, // c2
{ 2, 3 }, // c3
{ 2, 4 }, // c4
{ 2, 5 }, // c5
{ 2, 6 }, // c6
{ 2, 7 }, // c7
{ 2, 8 }, // c8
{ 2, 9 }, // c9
{ 2, 10 }, // c10
{ 2, 11 }, // c11
{ 2, 12 }, // c12
{ 2, 13 }, // c13
{ 2, 14 }, // c14
{ 2, 15 }, // c15
{ 2, 14 }, // led_red
{ 2, 15 }, // led_green
{ 2, 13 }, // led_blue
{ 1, 8 }, // i2c_scl
{ 1, 9 }, // i2c_sda
{ 1, 10 }, // eeprom_scl
{ 1, 11 }, // eeprom_sda
{ 0, 4 }, // spi_cs
{ 0, 5 }, // spi_sclk
{ 0, 6 }, // spi_miso
{ 0, 7 }, // spi_mosi
{ 0, 9 }, // uart_tx
{ 0, 10 } // uart_rx
};
uint_fast8_t n = static_cast< uint_fast8_t>( name );
if (n >= static_cast< uint_fast8_t>( pins::SIZE_THIS_IS_NOT_A_PIN )) {
HWLIB_PANIC_WITH_LOCATION;
}
return pin_info_array[ n ];
}
/// \endcond
}; // namespace stm32f1xx
#include HWLIB_INCLUDE( hwlib-stm32f1xx.hpp )
namespace termite = ::stm32f1xx;
namespace hwlib {
namespace target = ::termite;
const auto target_chip = target_chips::stm32f103c8;
const auto target_board = target_boards::termite;
};
#endif // #ifdef HWLIB_TERMITE_H
<|start_filename|>library/hwlib-all.hpp<|end_filename|>
// ==========================================================================
//
// File : hwlib-all.hpp
// Part of : C++ hwlib library for close-to-the-hardware OO programming
// Copyright : <EMAIL> 2017-2019
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// ==========================================================================
// This file includes all target-independent hwlib files
// (except hwlib-doxygen.hpp, which contains only Doxygen stuff)
// to add a file, add it here and in the Makefile.inc
//
// C/C++ library files are included here rather than in the individual
// library files.
// The individual hwlib header files depend on the order in which they
// are included here, and don't have multiple-include guards.
// They are not meant to be included individually.
//
// In normal use, this file is included by a target-specific hwlib file.
#ifndef HWLIB_ALL_H
#define HWLIB_ALL_H
#include <cstdint>
#include <array>
#include <cstddef>
#include <type_traits>
#include <numeric>
#include <cmath>
#include HWLIB_INCLUDE( core/hwlib-defines.hpp )
#include HWLIB_INCLUDE( core/hwlib-targets.hpp )
#include HWLIB_INCLUDE( core/hwlib-panic.hpp )
#include HWLIB_INCLUDE( core/hwlib-noncopyable.hpp )
#include HWLIB_INCLUDE( core/hwlib-common.hpp )
#include HWLIB_INCLUDE( core/hwlib-ratio.hpp )
#include HWLIB_INCLUDE( core/hwlib-background.hpp )
#include HWLIB_INCLUDE( core/hwlib-xy.hpp )
#include HWLIB_INCLUDE( core/hwlib-color.hpp )
#include HWLIB_INCLUDE( core/hwlib-random.hpp )
#include HWLIB_INCLUDE( core/hwlib-wait.hpp )
#include HWLIB_INCLUDE( pins/hwlib-pin.hpp )
#include HWLIB_INCLUDE( pins/hwlib-pin-dummies.hpp )
#include HWLIB_INCLUDE( pins/hwlib-pin-stores.hpp )
#include HWLIB_INCLUDE( pins/hwlib-pin-from-pin.hpp )
#include HWLIB_INCLUDE( pins/hwlib-pin-invert.hpp )
#include HWLIB_INCLUDE( pins/hwlib-pin-all.hpp )
#include HWLIB_INCLUDE( pins/hwlib-pin-direct.hpp )
#include HWLIB_INCLUDE( pins/hwlib-pin-demos.hpp )
#include HWLIB_INCLUDE( ports/hwlib-port.hpp )
#include HWLIB_INCLUDE( ports/hwlib-port-from-port.hpp )
#include HWLIB_INCLUDE( ports/hwlib-port-from-pins.hpp )
#include HWLIB_INCLUDE( ports/hwlib-port-invert.hpp )
#include HWLIB_INCLUDE( ports/hwlib-port-all.hpp )
#include HWLIB_INCLUDE( ports/hwlib-port-direct.hpp )
#include HWLIB_INCLUDE( ports/hwlib-port-demos.hpp )
#include HWLIB_INCLUDE( char-io/hwlib-ostream.hpp )
#include HWLIB_INCLUDE( char-io/hwlib-istream.hpp )
#include HWLIB_INCLUDE( char-io/hwlib-bb-uart.hpp )
#include HWLIB_INCLUDE( char-io/hwlib-console.hpp )
#include HWLIB_INCLUDE( char-io/hwlib-terminal.hpp )
#include HWLIB_INCLUDE( char-io/hwlib-terminal-demos.hpp )
#include HWLIB_INCLUDE( core/hwlib-test.hpp )
#include HWLIB_INCLUDE( core/hwlib-string.hpp )
#include HWLIB_INCLUDE( core/hwlib-adc.hpp )
#include HWLIB_INCLUDE( core/hwlib-dac.hpp )
#include HWLIB_INCLUDE( core/hwlib-dac-demos.hpp )
#include HWLIB_INCLUDE( core/hwlib-servo.hpp )
#include HWLIB_INCLUDE( core/hwlib-i2c.hpp )
#include HWLIB_INCLUDE( core/hwlib-spi.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-image.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-image-decorators.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-font.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-window.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-canvas.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-drawables.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-window-decorators.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-window-demos.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-window-terminal.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-font-8x8.hpp )
#include HWLIB_INCLUDE( graphics/hwlib-graphics-font-16x16.hpp )
#include HWLIB_INCLUDE( peripherals/hwlib-pcf8574.hpp )
#include HWLIB_INCLUDE( peripherals/hwlib-pcf8591.hpp )
#include HWLIB_INCLUDE( peripherals/hwlib-hc595.hpp )
#include HWLIB_INCLUDE( peripherals/hwlib-hd44780.hpp )
#include HWLIB_INCLUDE( peripherals/hwlib-glcd-5510.hpp )
#include HWLIB_INCLUDE( peripherals/hwlib-glcd-oled.hpp )
#ifndef __AVR__
#include HWLIB_INCLUDE( peripherals/hwlib-glcd-st7789.hpp )
#endif
#include HWLIB_INCLUDE( peripherals/hwlib-matrix-keypad.hpp )
#include HWLIB_INCLUDE( peripherals/hwlib-servo-background.hpp )
#endif // HWLIB_ALL_H
<|start_filename|>library/targets/hwlib-mimxrt1062.hpp<|end_filename|>
///@file
// ==========================================================================
//
// File : hwlib-mimxrt1062.hpp
// Part of : C++ hwlib library for close-to-the-hardware OO programming
// Copyright : <EMAIL> 2017-2019
// Author : <NAME>
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// ==========================================================================
// this file contains Doxygen lines
#include "mimxrt1062.h"
#ifndef HWLIB_MIMXRT1062
#define HWLIB_MIMXRT1062
namespace mimxrt1062
{
/**
* @brief Struct containing all the information needed to map the right Teensy pin to the right core pad/pin and all config registers.
*
*/
struct core_pin
{
/**
* @brief The base address of the GPIO port.
*
*/
uint32_t GPIO_port_base_adress;
/**
* @brief The (bit) number this pin is within the GPIO port.
*
*/
uint8_t GPIO_port_bit_number;
/**
* @brief The index of the multiplexer control register that multiplexes a chip pad to a hardware block.
*
*/
uint32_t IOMUXC_MUX_control_register_array_index;
/**
* @brief The index of the chip pad configuration register.
*
*/
uint32_t IOMUXC_PAD_control_register_array_index;
/**
* @brief The base address of the serial port.
* @details This maps to the base address of a serial port. The bit number within the port is the same as the GPIO_port_bit_number. This port is used for UART communication and defaults to 0XFFFFFFFF.
*
*/
uint32_t serial_base_adress;
/**
* @brief The A/D conversion channel number.
* @details This value defaults to 0xFFFFFFFF.
*/
uint32_t ad_channel;
// constructor
constexpr core_pin(uint32_t GPIO_port_base_adress, uint8_t GPIO_port_bit_number, uint32_t IOMUXC_MUX_control_register_array_index, uint32_t IOMUXC_PAD_control_register_array_index, uint32_t serial_base_adress = 0XFFFFFFFF, uint32_t ad_channel = 0XFFFFFFFF):
GPIO_port_base_adress(GPIO_port_base_adress), GPIO_port_bit_number(GPIO_port_bit_number),
IOMUXC_MUX_control_register_array_index(IOMUXC_MUX_control_register_array_index),
IOMUXC_PAD_control_register_array_index(IOMUXC_PAD_control_register_array_index),
serial_base_adress(serial_base_adress),
ad_channel(ad_channel)
{};
};
/**
* @brief Struct containing the core pin structs that map a chip pad to the configuration register addresses and IO ports
* @details All this info comes from the chip reference manual in combination with the Arduino IDE library. This may be a bit vague,
* but every pin was tested for its functionality and confirmed working
*
*/
constexpr core_pin core_pin_struct_array[40] =
{
{GPIO6_BASE,3,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B0_03,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B0_03,LPUART6_BASE},
{GPIO6_BASE,2,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B0_02,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B0_02,LPUART6_BASE},
{GPIO9_BASE,4,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_04,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_04},
{GPIO9_BASE,5,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_05,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_05},
{GPIO9_BASE,6,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_06,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_06},
{GPIO9_BASE,8,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_08,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_08},
{GPIO7_BASE,10,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B0_10,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B0_10},
{GPIO7_BASE,17,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B1_01,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B1_01,LPUART4_BASE},
{GPIO7_BASE,16,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B1_00,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B1_00,LPUART4_BASE},
{GPIO7_BASE,11,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B0_11,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B0_11},
{GPIO7_BASE,0,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B0_00,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B0_00},
{GPIO7_BASE,2,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B0_02,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B0_02},
{GPIO7_BASE,1,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B0_01,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B0_01},
{GPIO7_BASE,3,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B0_03,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B0_03},
{GPIO6_BASE,18,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_02,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_02,LPUART2_BASE,7},
{GPIO6_BASE,19,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_03,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_03,LPUART2_BASE,8},
{GPIO6_BASE,23,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_07,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_07,LPUART3_BASE,12},
{GPIO6_BASE,22,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_06,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_06,LPUART3_BASE,11},
{GPIO6_BASE,17,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_01,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_01,0,6},
{GPIO6_BASE,16,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_00,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_00,0,5},
{GPIO6_BASE,26,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_10,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_10,LPUART8_BASE,15},
{GPIO6_BASE,27,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_11,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_11,LPUART8_BASE,0},
{GPIO6_BASE,24,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_08,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_08,0,13},
{GPIO6_BASE,25,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_09,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_09,0,14},
{GPIO6_BASE,12,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B0_12,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B0_12,LPUART1_BASE},
{GPIO6_BASE,13,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B0_13,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B0_13,LPUART1_BASE},
{GPIO6_BASE,30,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_14,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_14},
{GPIO6_BASE,31,kIOMUXC_SW_MUX_CTL_PAD_GPIO_AD_B1_15,kIOMUXC_SW_PAD_CTL_PAD_GPIO_AD_B1_15},
{GPIO8_BASE,18,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_32,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_32,LPUART7_BASE},
{GPIO9_BASE,31,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_31,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_31,LPUART7_BASE},
{GPIO8_BASE,23,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_37,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_37},
{GPIO8_BASE,22,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_36,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_36},
{GPIO7_BASE,12,kIOMUXC_SW_MUX_CTL_PAD_GPIO_B0_12,kIOMUXC_SW_PAD_CTL_PAD_GPIO_B0_12},
{GPIO9_BASE,7,kIOMUXC_SW_MUX_CTL_PAD_GPIO_EMC_07,kIOMUXC_SW_PAD_CTL_PAD_GPIO_EMC_07},
{GPIO8_BASE,15,kIOMUXC_SW_MUX_CTL_PAD_GPIO_SD_B0_03,kIOMUXC_SW_PAD_CTL_PAD_GPIO_SD_B0_03},
{GPIO8_BASE,14,kIOMUXC_SW_MUX_CTL_PAD_GPIO_SD_B0_02,kIOMUXC_SW_PAD_CTL_PAD_GPIO_SD_B0_02},
{GPIO8_BASE,13,kIOMUXC_SW_MUX_CTL_PAD_GPIO_SD_B0_01,kIOMUXC_SW_PAD_CTL_PAD_GPIO_SD_B0_01},
{GPIO8_BASE,12,kIOMUXC_SW_MUX_CTL_PAD_GPIO_SD_B0_00,kIOMUXC_SW_PAD_CTL_PAD_GPIO_SD_B0_00},
{GPIO8_BASE,17,kIOMUXC_SW_MUX_CTL_PAD_GPIO_SD_B0_05,kIOMUXC_SW_PAD_CTL_PAD_GPIO_SD_B0_05},
{GPIO8_BASE,16,kIOMUXC_SW_MUX_CTL_PAD_GPIO_SD_B0_04,kIOMUXC_SW_PAD_CTL_PAD_GPIO_SD_B0_04}
};
/**
* @brief Function to set a IO multiplex register to a mode using a mask.
* @details Notice that the lower three bits are always cleared before writing to this register, so the mask must include the desired value for those bits.
*
* @param n Index in the IOMUXC SW_MUX_CTL_PAD array corresponding to the register addresses from the manufacturer file.
* @param mask The mask that needs to be written to the register. 0b0101 to set it to GPIO for example. Read the reference manual for information on this.
*/
inline void writeIOMUXMUXCTL(int n, uint32_t mask)
{
IOMUXC->SW_MUX_CTL_PAD[n] &= ~(0b111);
IOMUXC->SW_MUX_CTL_PAD[n] |= mask;
}
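// usage sketch (hypothetical pin index; not part of the library):
//
// const core_pin & p = core_pin_struct_array[ 13 ];
// writeIOMUXMUXCTL( p.IOMUXC_MUX_control_register_array_index, 0b0101 ); // pad -> GPIO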
/**
* @brief Function to set an IO multiplex config register to a mode using a mask.
* @details Notice that the whole register is cleared except the reserved bits; in other words, every bit needs to be set to a desired value.
*
* @param n Index in the IOMUXC SW_PAD_CTL_PAD array corresponding to the register addresses from the manufacturer file.
* @param mask The mask that needs to be written to the register. Read the reference manual for information on this.
*/
inline void writeIOMUXPADCTL(int n, uint32_t mask)
{
uint32_t clearMask = ~((0b111111 << 10) | (0b11111 << 3) | 0b1);
IOMUXC->SW_PAD_CTL_PAD[n] &= clearMask;
IOMUXC->SW_PAD_CTL_PAD[n] |= mask;
}
/// the number of ticks per us
int_fast64_t HWLIB_WEAK ticks_per_us()
{
return 600; // this number should equal the CPU frequency in MHz; do not touch unless you know what you are doing
}
uint_fast64_t HWLIB_WEAK now_ticks()
{
static bool init_done = false;
if (!init_done)
{
// EFC0->EEFC_FMR = EEFC_FMR_FWS(4);
// EFC1->EEFC_FMR = EEFC_FMR_FWS(4);
SysTick->CTRL = 0; // stop the timer
SysTick->LOAD = 0xFFFFFF; // use it as a 24-bit timer
SysTick->VAL = 0; // clear the timer
SysTick->CTRL = 5; // start the timer, 1:1
init_done = true;
}
static unsigned int last_low = 0;
static unsigned long long int high = 0;
// the timer ticks down, but we want an up counter
unsigned int low = 0xFFFFFF - (SysTick->VAL & 0xFFFFFF);
if (low < last_low)
{
// the timer rolled over, so increment the high part
high += 0x1ULL << 24;
}
last_low = low;
// return the aggregated ticks value
// the counter runs at the CPU clock (600 MHz)
return (low | high);
}
} // namespace mimxrt1062
#endif //HWLIB_MIMXRT1062
<|start_filename|>library/targets/hwlib-stm32f1xx.hpp<|end_filename|>
// ==========================================================================
//
// File : hwlib-stm32f103c8.hpp
// Part of : C++ hwlib library for close-to-the-hardware OO programming
// Copyright : <EMAIL> 2017
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// ==========================================================================
// this file contains Doxygen lines
/// @file
#ifndef HWLIB_STM32F1xx_H
#define HWLIB_STM32F1xx_H
#include HWLIB_INCLUDE(../hwlib-all.hpp)
// the STM header files use 'register' in the pre-C++17 sense
//#define register
#include "stm32f103xb.h"
//#undef register
/// \brief
/// hwlib HAL for the stm32f1xx chips
///
/// This namespace contains the hwlib implementation of the pins, timing
/// and (software) UART output.
///
/// Initially, the chip runs from its HSI internal RC oscillator at 8 MHz.
/// The first wait call configures the chip to run at 72 MHz,
/// assuming an 8 MHz crystal.
///
/// The chip runs at 3.3 Volt and that is the level on its IO pins.
///
/// References:
/// - <A HREF="http://www.st.com/content/ccc/resource/technical/document/reference_manual/59/b9/ba/7f/11/af/43/d5/CD00171190.pdf/files/CD00171190.pdf/jcr:content/translations/en.CD00171190.pdf">
/// RM0008 STM32F1xxx reference manual</A> (pdf)
///
namespace stm32f1xx {
// the
// - enum class pins
// - struct pin_info_type
// - pin_info array
// must have been declared before this file is included
/// \cond INTERNAL
GPIO_TypeDef &__attribute__((weak)) port_registers(uint32_t port) {
// a bit of a kludge to put this here:
// enable the clock to all GPIO ports
RCC->APB2ENR |=
RCC_APB2ENR_IOPAEN | RCC_APB2ENR_IOPBEN |
RCC_APB2ENR_IOPCEN | RCC_APB2ENR_IOPDEN;
switch (port) {
case 0 :
return *GPIOA;
case 1 :
return *GPIOB;
case 2 :
return *GPIOC;
case 3 :
return *GPIOD;
default :
break;
}
// doesn't return
HWLIB_PANIC_WITH_LOCATION;
}
class pin_base {
public:
volatile GPIO_TypeDef &port;
volatile uint32_t &config_word;
uint32_t pin;
uint32_t config_offset;
uint32_t mask;
void config(uint32_t conf) {
config_word &= ~(0xF << config_offset);
config_word |= conf << config_offset;
}
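// each pin has a 4-bit field in CRL (pins 0..7) or CRH (pins 8..15):
// the low two bits select the MODE (input / output speed), the high two
// bits the CNF. Values used below: 0x08 = input with pull-up/down,
// 0x03 = 50 MHz push-pull output, 0x07 = 50 MHz open-drain output.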
pin_base( uint32_t port_number, uint32_t pin_number, uint32_t conf ):
port{ port_registers( port_number ) },
config_word{ ( pin_number < 8 ) ? port.CRL : port.CRH },
pin{ pin_number },
config_offset{ 4 * (pin_number % 8) },
mask{ 0x1U << pin_number }
{
config( conf );
// a15 = JTDI pin
if(( port_number == 0 ) && ( pin_number == 15 )){
RCC->APB2ENR |= RCC_APB2ENR_AFIOEN;
AFIO->MAPR |= AFIO_MAPR_SWJ_CFG_1;
}
// b4 = NJTRST
if(( port_number == 1 ) && ( pin_number == 4 )){
RCC->APB2ENR = RCC->APB2ENR | 0b01; // Enable AFIO clock
AFIO->MAPR = ( AFIO->MAPR & ( ~ ( 0b111 <<24 ))) | ( 0b001 << 24 ); // JTAG+SW-DP, but without NJTRST
}
}
bool base_read() {
return ((port.IDR & mask) != 0);
}
void base_write(bool v) {
port.BSRR |= (v ? mask : (mask << 16));
}
};
/// \endcond
/// pin_in implementation for an stm32f103c8
class pin_in : public hwlib::pin_in, private pin_base {
public:
/// stm32f103c8 pin_in constructor
///
/// Construct an stm32f103c8 input pin
/// from its port and pin numbers.
///
/// This constructor sets the pin direction to input.
/// By default, the internal weak pull-up is enabled.
pin_in(uint32_t port_number, uint32_t pin_number) :
pin_base{port_number, pin_number, 0x08} {}
/// stm32f103c8 pin_in constructor
///
/// Construct an stm32f103c8 input pin
/// from its blue-pill pin name.
///
/// This constructor sets the pin direction to input.
/// By default, the internal weak pull-up is enabled.
pin_in(pins name) :
pin_in{
pin_info(name).port,
pin_info(name).pin
} {}
bool read() override {
return base_read();
}
void refresh() override {}
};
/// pin_out implementation for an stm32f103c8
class pin_out : public hwlib::pin_out, private pin_base {
public:
/// stm32f103c8 pin_out constructor
///
/// Construct an stm32f103c8 output pin
/// from its port and pin numbers.
///
/// This constructor sets the pin direction to output.
///
/// This constructor doesn't set the pin value
/// to high or low, the set function must
/// be called to do so.
pin_out(uint32_t port_number, uint32_t pin_number) :
pin_base{port_number, pin_number, 0x03} {}
/// stm32f103c8 pin_out constructor
///
/// Construct an stm32f103c8 output pin
/// from its blue-pill pin name.
///
/// This constructor sets the pin direction to output.
///
/// This constructor doesn't set the pin value
/// to high or low, the set function must
/// be called to do so.
pin_out(pins name) :
pin_out{
pin_info(name).port,
pin_info(name).pin
} {}
void write(bool v) override {
base_write(v);
}
void flush() override {}
};
/// pin_in_out implementation for an stm32f103c8
class pin_in_out : public hwlib::pin_in_out, private pin_base {
public:
/// stm32f103c8 pin_out constructor
///
/// Construct an stm32f103c8 input/output pin
/// from its port and pin numbers.
///
/// This constructor doesn't set the pin direction
/// to input or output, a direction_set function must
/// be called to do so.
///
/// This constructor doesn't set the pin value
/// to high or low, the set function must
/// be called to do so.
pin_in_out(uint32_t port_number, uint32_t pin_number) :
pin_base{port_number, pin_number, 0x08} {}
/// stm32f103c8 pin_out constructor
///
/// Construct an stm32f103c8 input/output pin
/// from its blue-pill pin name.
///
/// This constructor doesn't set the pin direction
/// to input or output, a direction_set function must
/// be called to do so.
///
/// This constructor doesn't set the pin value
/// to high or low, the set function must
/// be called to do so.
pin_in_out(pins name) :
pin_in_out{
pin_info(name).port,
pin_info(name).pin
} {}
void direction_set_input() override {
config(0x08);
}
bool read() override {
return base_read();
}
void direction_set_output() override {
config(0x03);
}
void write(bool v) override {
base_write(v);
}
void flush() override {}
void refresh() override {}
void direction_flush() override {}
};
/// 36kHz output on pin chip PA6 (blue pill A6)
///
/// This class provides a 36 kHz output on chip pin PA6
/// that can be enabled or disabled by calling
/// write( 1 ) resp. write( 0 ).
class a6_36kHz : public hwlib::pin_out {
public:
/// create the 36kHz output
a6_36kHz() {
RCC->APB2ENR |= RCC_APB2ENR_IOPAEN; // Enable GPIO port a
RCC->APB1ENR |= RCC_APB1ENR_TIM3EN; // Enable Timer 3
// this resets the other pins to their defaults!!!
GPIOA->CRL = GPIO_CRL_MODE6 | GPIO_CRL_CNF6_1; // Set Pin A6 to alternate function Push-Pull
TIM3->ARR = 221; // Auto reload value
TIM3->PSC = 0; // Capture Compare preload, for duty cycle
TIM3->CCR1 = 111;
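// frequency = timer clock / ( ARR + 1 ): assuming the timer still runs
// from the 8 MHz HSI clock, 8 MHz / 222 is approximately 36 kHz;
// CCR1 = 111 gives a roughly 50% duty cycle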
TIM3->CCMR1 = TIM_CCMR1_OC1M | TIM_CCMR1_OC1PE;
TIM3->EGR |= TIM_EGR_UG; // Trigger update for Timer 3 (to preload)
write(0);
TIM3->CR1 |= TIM_CR1_ARPE; // Enable auto- reload preload on Timer 3
TIM3->CR1 |= TIM_CR1_CEN; // Enable Timer 3
}
/// enable or disable the 36 kHz output
//
/// Calling write( 1 ) enables the 36 kHz output, calling write( 0 )
/// disables the output and makes the output low.
void write(bool b) override {
if (b) {
TIM3->CCER |= TIM_CCER_CC1E;
} else {
TIM3->CCER &= ~TIM_CCER_CC1E;
}
}
void flush() override {};
}; // class a6_36kHz
class spi_bus_hardware : public hwlib::spi_bus {
private:
void write_and_read(
const size_t n,
const uint8_t data_out[],
uint8_t data_in[]
) override {
for( uint_fast8_t i = 0; i < n; i++ ) {
if (data_out != nullptr) {
SPI1->DR = (uint32_t)data_out[i]; //Write a byte to SPI hardware
while (!(SPI1->SR & SPI_SR_TXE)) { // Wait for byte to be processed by SPI hardware
hwlib::wait_ns_busy(1);
}
}
if (data_in != nullptr) {
while (!(SPI1->SR & SPI_SR_RXNE)) { // wait for a byte to be received
hwlib::wait_ns_busy(1);
}
data_in[i] = SPI1->DR;
}
}
}
public:
/// construct a hardware spi bus for the stm32f1xx
///
/// This constructor creates a hardware spi bus
/// on the default spi1 pins.
///
/// Default pins are:
/// SCK: A5
/// MOSI: A7
/// NSS: A4
/// MISO: A6
spi_bus_hardware(){
// Setup all the ports needed by the SPI bus
RCC->APB2ENR |= RCC_APB2ENR_SPI1EN; //enable clock signal to spi
RCC->APB2ENR |= RCC_APB2ENR_IOPAEN; //enable clock signal to peripheral port A (this is where the SPI pins reside)
GPIOA->CRL &= ~(GPIO_CRL_CNF5 | GPIO_CRL_MODE5);//sck
GPIOA->CRL &= ~(GPIO_CRL_CNF7 | GPIO_CRL_MODE7);//mosi
GPIOA->CRL &= ~(GPIO_CRL_CNF4 | GPIO_CRL_MODE4);//nss
GPIOA->CRL &= ~(GPIO_CRL_CNF6 | GPIO_CRL_MODE6);// miso
GPIOA->CRL |= GPIO_CRL_MODE5_0 | GPIO_CRL_MODE5_1; //sck
GPIOA->CRL |= GPIO_CRL_CNF5_1;
GPIOA->CRL |= GPIO_CRL_MODE7_0 | GPIO_CRL_MODE7_1; //mosi
GPIOA->CRL |= GPIO_CRL_CNF7_1;
GPIOA->CRL |= GPIO_CRL_MODE4_0 | GPIO_CRL_MODE4_1; //nss
GPIOA->CRL |= GPIO_CRL_CNF4_1;
GPIOA->CRL |= GPIO_CRL_CNF6_0; // miso
SPI1->CR1 |= SPI_CR1_CPOL; //Invert clock polarity
SPI1->CR2 |= SPI_CR2_SSOE; //Slave select will output
SPI1->CR1 |= SPI_CR1_MSTR | SPI_CR1_SPE; //Make us the master and enable spi
}
};
/// pin_oc implementation for an stm32f103c8
class pin_oc : public hwlib::pin_oc, private pin_base {
public:
/// stm32f103c8 pin_oc constructor
///
/// Construct an stm32f103c8 open-collector pin
/// from its port and pin numbers.
///
/// This constructor doesn't set the pin value
/// to high or low, the set function must
/// be called to do so.
pin_oc(uint32_t port_number, uint32_t pin_number) :
pin_base{port_number, pin_number, 0x07} {}
/// stm32f103c8 pin_oc constructor
///
/// Construct an stm32f103c8 open-collector pin
/// from its blue-pill pin name.
///
/// This constructor doesn't set the pin value
/// to high or low, the set function must
/// be called to do so.
pin_oc(pins name) :
pin_oc{
pin_info(name).port,
pin_info(name).pin
} {}
bool read() override {
return base_read();
}
void write(bool v) override {
base_write(v);
}
void flush() override {}
void refresh() override {}
};
/// the number of ticks per us
uint_fast64_t HWLIB_WEAK ticks_per_us() {
return 64; //72;
}
/// returns the number of ticks since some fixed starting point
uint_fast64_t HWLIB_WEAK now_ticks() {
static bool init_done = false;
if (!init_done) {
// switch to the 72 MHz crystal/PLL clock, from stm32x.cpp,
// some values taken from
// https://github.com/rogerclarkmelbourne/STM32duino-bootloader
// Flash 2 wait state
FLASH->ACR &= (uint32_t) ((uint32_t) ~FLASH_ACR_LATENCY);
FLASH->ACR |= (uint32_t) FLASH_ACR_LATENCY_2;
// Enable Prefetch Buffer
FLASH->ACR |= FLASH_ACR_PRFTBE;
// enable HSE and wait for it
RCC->CR |= RCC_CR_HSEON;
while ((RCC->CR & RCC_CR_HSERDY) == 0) {}
// PLL configuration: PLLCLK = HSE * 9 = 72 MHz
RCC->CFGR &= (uint32_t) ((uint32_t) ~(
RCC_CFGR_PLLSRC | RCC_CFGR_PLLXTPRE | RCC_CFGR_PLLMULL));
RCC->CFGR |= (uint32_t) (RCC_CFGR_PLLSRC | RCC_CFGR_PLLMULL4);
// Enable PLL and wait for it
RCC->CR |= RCC_CR_PLLON;
while ((RCC->CR & RCC_CR_PLLRDY) == 0) {}
// HCLK = SYSCLK
RCC->CFGR |= (uint32_t) RCC_CFGR_HPRE_DIV1;
// PCLK2 = HCLK
RCC->CFGR |= (uint32_t) RCC_CFGR_PPRE2_DIV1;
// PCLK1 = HCLK / 2
RCC->CFGR |= (uint32_t) RCC_CFGR_PPRE1_DIV2;
// Select PLL as system clock source
RCC->CFGR &= (uint32_t) ((uint32_t) ~(RCC_CFGR_SW));
RCC->CFGR |= (uint32_t) RCC_CFGR_SW_PLL;
// Wait till PLL is used as system clock source
while ((RCC->CFGR & (uint32_t) RCC_CFGR_SWS) != (uint32_t) 0x08) {}
// start the systick timer
SysTick->CTRL = 0; // stop the timer
SysTick->LOAD = 0xFFFFFF; // use it as a 24-bit timer
SysTick->VAL = 0; // clear the timer
SysTick->CTRL = 5; // start the timer, 1:1
init_done = true;
}
static unsigned int last_low = 0;
static unsigned long long int high = 0;
// the timer ticks down, but we want an up counter
unsigned int low = 0xFFFFFF - (SysTick->VAL & 0xFFFFFF);
if (low < last_low) {
// the timer rolled over, so increment the high part
high += 0x1ULL << 24;
}
last_low = low;
// return the aggregated ticks value
// the counter runs at the system clock rate
return (low | high);
}
/// \cond INTERNAL
void uart_init();
/// \endcond
bool uart_char_available();
char uart_getc();
void uart_putc(char c);
#ifdef _HWLIB_ONCE
// If hwlib gets to cpp20 minimum make this consteval.
constexpr uint32_t calculateBaudRate(long long baud) {
long long fck = 64000000; // PCLK2 is getting 64 MHz
fck *= 100; // doing fck x100 so we don't get floats.
long long usartdiv = (fck/baud)/16;
unsigned int mantissa = std::round(usartdiv/100);
unsigned int divider = std::round(((usartdiv-(mantissa*100))*16)/100);
uint32_t baudrateReg = mantissa<<4u | divider;
return baudrateReg;
}
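// worked example: HWLIB_BAUDRATE = 115200 with the 64 MHz PCLK2 gives
// usartdiv = (( 64'000'000 * 100 ) / 115200 ) / 16 = 3472 (x100 fixed point),
// so mantissa = 34, divider = 11 and BRR = ( 34 << 4 ) | 11 = 0x22B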
void uart_init() {
static bool init_done = false;
if (init_done) {
return;
}
init_done = true;
RCC->APB2ENR |= RCC_APB2ENR_IOPAEN; // enable GPIOA clock
RCC->APB2ENR |= RCC_APB2ENR_USART1EN; // enable USART1 clock
GPIOA->CRH &= ~(GPIO_CRH_CNF9 | GPIO_CRH_MODE9); // reset PA9
GPIOA->CRH &= ~(GPIO_CRH_CNF10 | GPIO_CRH_MODE10); // reset PA10
GPIOA->CRH |= GPIO_CRH_MODE9_1 | GPIO_CRH_MODE9_0; // 0b11 50MHz output
GPIOA->CRH |= GPIO_CRH_CNF9_1; // PA9: output @ 50MHz - Alt-function Push-pull
GPIOA->CRH |= GPIO_CRH_CNF10_0; // PA10 RX - Mode = 0b00 (input) - CNF = 0b01 (input floating)
USART1->BRR = calculateBaudRate(HWLIB_BAUDRATE);
// configure USART1 registers
USART1->CR1 = USART_CR1_TE | USART_CR1_RE | USART_CR1_UE;
}
bool uart_char_available() {
uart_init();
return (USART1->SR & USART_SR_RXNE_Msk);
}
char uart_getc() {
// uart_init() is not needed because uart_char_available does that
while (!uart_char_available()) {
hwlib::background::do_background_work();
}
return USART1->DR;
}
void uart_putc(char c) {
uart_init();
while (!(USART1->SR & USART_SR_TXE_Msk)) {
hwlib::background::do_background_work();
}
USART1->DR = c;
}
#endif
}; // namespace stm32f1xx
namespace hwlib {
void wait_ns(int_fast32_t n);
void wait_us(int_fast32_t n);
void wait_ms(int_fast32_t n);
void wait_ns_busy(int_fast32_t n);
void wait_us_busy(int_fast32_t n);
void wait_ms_busy(int_fast32_t n);
#define HWLIB_USE_HW_UART
#ifdef HWLIB_USE_HW_UART
void HWLIB_WEAK uart_putc(char c) {
stm32f1xx::uart_putc(c);
}
bool HWLIB_WEAK uart_char_available() {
return stm32f1xx::uart_char_available();
}
char HWLIB_WEAK uart_getc() {
return stm32f1xx::uart_getc();
}
#else
void HWLIB_WEAK uart_putc( char c ){
static target::pin_out pin( 0, 9 );
uart_putc_bit_banged_pin( c, pin );
}
bool HWLIB_WEAK uart_char_available(){
static target::pin_in pin( 0, 8 );
return ! pin.read();
}
char HWLIB_WEAK uart_getc( ){
static target::pin_in pin( 0, 8 );
return uart_getc_bit_banged_pin( pin );
}
#endif
#ifdef _HWLIB_ONCE
uint64_t now_ticks() {
return stm32f1xx::now_ticks();
}
uint64_t ticks_per_us() {
return stm32f1xx::ticks_per_us();
}
uint64_t now_us() {
return now_ticks() / ticks_per_us();
}
// Busy waits
void wait_ns_busy(int_fast32_t n) {
wait_us_busy((n + 999) / 1000);
}
void wait_us_busy(int_fast32_t n) {
auto end = now_us() + n;
while (now_us() < end) {}
}
void wait_ms_busy(int_fast32_t n) {
while (n > 0) {
wait_us_busy(1000);
--n;
}
}
void HWLIB_WEAK wait_ns(int_fast32_t n) {
wait_us((n + 999) / 1000);
}
void HWLIB_WEAK wait_us(int_fast32_t n) {
auto end = now_us() + n;
while (now_us() < end) {
background::do_background_work();
}
}
void HWLIB_WEAK wait_ms(int_fast32_t n) {
while (n > 0) {
wait_us(1000);
--n;
}
}
#endif
}; //namespace hwlib
#endif // #ifdef HWLIB_STM32F1xx_H
<|start_filename|>library/targets/hwlib-arduino-uno.hpp<|end_filename|>
// ==========================================================================
//
// File : hwlib-arduino-uno.hpp
// Part of : C++ hwlib library for close-to-the-hardware OO programming
// Copyright : <EMAIL> 2017-2019
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// ==========================================================================
// this file contains Doxygen lines
/// @file
#ifndef HWLIB_UNO_H
#define HWLIB_UNO_H
#include "hwlib-atmega328.hpp"
namespace uno {
/// Arduino Uno pin names
///
/// These are the pins of an Arduino Uno board.
/// Digital pins d0..d13, analog input pins A0..A5,
/// SCL, SDA, TX (=D1), RX (=D0),
/// LED (=D13), SCK (=D13), MISO (=D12), MOSI (=D11), SS (=D10).
enum class pins {
d0, d1, d2, d3, d4, d5, d6, d7, d8, d9, d10, d11, d12, d13,
a0, a1, a2, a3, a4, a5,
scl, sda, tx, rx, led,
sck, miso, mosi, ss
/// \cond INTERNAL
,SIZE_THIS_IS_NOT_A_PIN
/// \endcond
};
/// \cond INTERNAL
struct pin_info_type {
uint8_t port;
uint8_t pin;
};
const pin_info_type pin_info_array[ (int) pins::SIZE_THIS_IS_NOT_A_PIN ] = {
{ 3, 0 }, // d0
{ 3, 1 }, // d1
{ 3, 2 }, // d2
{ 3, 3 }, // d3
{ 3, 4 }, // d4
{ 3, 5 }, // d5
{ 3, 6 }, // d6
{ 3, 7 }, // d7
{ 1, 0 }, // d8
{ 1, 1 }, // d9
{ 1, 2 }, // d10
{ 1, 3 }, // d11
{ 1, 4 }, // d12
{ 1, 5 }, // d13
{ 2, 0 }, // a0
{ 2, 1 }, // a1
{ 2, 2 }, // a2
{ 2, 3 }, // a3
{ 2, 4 }, // a4
{ 2, 5 }, // a5
{ 2, 5 }, // scl
{ 2, 4 }, // sda
{ 3, 0 }, // tx
{ 3, 1 }, // rx
{ 1, 5 }, // led
{ 1, 5 }, // sck
{ 1, 4 }, // miso
{ 1, 3 }, // mosi
{ 1, 2 }, // ss
};
const pin_info_type & HWLIB_WEAK pin_info( pins name ){
uint_fast8_t n = static_cast< uint_fast8_t>( name );
if( n >= static_cast< uint_fast8_t>( pins::SIZE_THIS_IS_NOT_A_PIN )){
HWLIB_PANIC_WITH_LOCATION;
}
return pin_info_array[ n ];
}
using pin_adc = atmega328::pin_adc;
class pin_in : public atmega328::pin_in {
public:
/// pin_in constructor from ATMega328P port/pin numbers
///
/// This call creates a pin_in from an ATMega328P port/pin
/// number pair.
///
/// This constructor sets the pin direction to input.
pin_in( uint8_t port_number, uint8_t pin_number ):
atmega328::pin_in( port_number, pin_number )
{}
/// Arduino Uno pin_in constructor from an Uno pin name
///
/// This call creates a pin_in from an Arduino Uno pin name.
///
/// This constructor sets the pin direction to input.
pin_in( pins name ):
pin_in{
pin_info( name ).port,
pin_info( name ).pin
}
{}
};
class pin_out : public atmega328::pin_out {
public:
/// pin_out constructor from ATMega328P port/pin numbers
///
/// This call creates a pin_out from an ATMega328P port/pin
/// number pair.
///
/// This constructor sets the pin direction to output.
pin_out( uint8_t port_number, uint8_t pin_number ):
atmega328::pin_out( port_number, pin_number )
{}
/// Arduino Uno pin_out constructor from an Uno pin name
///
/// This call creates a pin_out from an Arduino Uno pin name.
///
/// This constructor sets the pin direction to output.
pin_out( pins name ):
pin_out{
pin_info( name ).port,
pin_info( name ).pin
}
{}
};
class pin_in_out : public atmega328::pin_in_out {
public:
/// ATmega pin_in_out constructor
///
/// Constructor for an ATMega328P input/output pin.
///
/// The port_number and pin_number refer to the chip,
/// not to the Arduino board pin names.
///
/// This constructor doesn't set the pin direction
/// to input or output, a direction_set function must
/// be called to do so.
pin_in_out( uint8_t port_number, uint8_t pin_number ):
atmega328::pin_in_out( port_number, pin_number )
{}
/// Arduino Uno pin_in_out constructor from an Uno pin name
///
/// This call creates a pin_in_out from an Arduino Uno pin name.
///
/// This constructor doesn't set the pin direction
/// to input or output, a direction_set function must
/// be called to do so.
pin_in_out( pins name ):
pin_in_out{
pin_info( name ).port,
pin_info( name ).pin
}
{}
};
class pin_oc : public atmega328::pin_oc {
public:
/// Arduino Uno pin_oc constructor
///
/// Constructor for an ATMega328P input pin.
///
/// The port_number and pin_number refer to the chip,
/// not to the Arduino board pin names.
///
/// This constructor sets the pin to high (high-impedance).
pin_oc( uint8_t port_number, uint8_t pin_number ):
atmega328::pin_oc( port_number, pin_number )
{}
/// Arduino Uno pin_oc constructor from an Uno pin name
///
/// This call creates a pin_oc from an Arduino Uno pin name.
///
/// This constructor sets the pin to high (high-impedance).
pin_oc( pins name ):
pin_oc{
pin_info( name ).port,
pin_info( name ).pin
}
{}
};
}; // namespace uno
namespace hwlib {
namespace target = ::uno;
const auto target_board = target_boards::arduino_uno;
}; //namespace hwlib
#endif // HWLIB_UNO_H
| Lennart99/hwlib |
<|start_filename|>pkg/pgtune/misc.go<|end_filename|>
package pgtune
import (
"fmt"
"math"
"runtime"
"github.com/timescale/timescaledb-tune/internal/parse"
"github.com/timescale/timescaledb-tune/pkg/pgutils"
)
// Keys in the conf file that are tunable but not in the other groupings
const (
CheckpointKey = "checkpoint_completion_target"
StatsTargetKey = "default_statistics_target"
MaxConnectionsKey = "max_connections"
RandomPageCostKey = "random_page_cost"
MaxLocksPerTxKey = "max_locks_per_transaction"
AutovacuumMaxWorkersKey = "autovacuum_max_workers"
AutovacuumNaptimeKey = "autovacuum_naptime"
EffectiveIOKey = "effective_io_concurrency" // linux only
checkpointDefault = "0.9"
statsTargetDefault = "500"
randomPageCostDefault = "1.1"
autovacuumMaxWorkersDefault = "10"
autovacuumNaptimeDefault = "10"
// effective io concurrency has changed in v13: https://www.postgresql.org/docs/13/release-13.html
// However, our previous value of 200 is translated to 1176, which seems excessively high
// (the upper limit is 1000). For SSDs we'll follow the advice here:
// https://www.postgresql.org/message-id/20210422195232.GA25061%40momjian.us
effectiveIODefaultOldVersions = "200"
effectiveIODefault = "256"
// If you want to lower this value, consider that Patroni will not accept anything less than 25 as
// a valid max_connections and will replace it with 100, per
// https://github.com/zalando/patroni/blob/00cc62726d6df25d31f9b0baa082c83cd3f7bef9/patroni/postgresql/config.py#L280
minMaxConns = 25
)
// MaxConnectionsDefault is the recommended default value for max_connections.
const MaxConnectionsDefault uint64 = 100
// MaxBackgroundWorkersDefault is the recommended default value for timescaledb.max_background_workers.
const MaxBackgroundWorkersDefault int = 8
// getMaxConns gives a default amount of connections based on a memory step
// function.
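// For example: 2 GB of total memory -> 25, 4 GB -> 50, 6 GB -> 75,
// anything larger -> 100.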
func getMaxConns(totalMemory uint64) uint64 {
switch {
case totalMemory <= 2*parse.Gigabyte:
return minMaxConns
case totalMemory <= 4*parse.Gigabyte:
return 50
case totalMemory <= 6*parse.Gigabyte:
return 75
default:
return MaxConnectionsDefault
}
}
func getEffectiveIOConcurrency(pgMajorVersion string) string {
switch pgMajorVersion {
case pgutils.MajorVersion96,
pgutils.MajorVersion10,
pgutils.MajorVersion11,
pgutils.MajorVersion12:
return effectiveIODefaultOldVersions
}
return effectiveIODefault
}
// maxLocksValues gives the number of locks for a power-2 memory starting
// with sub-8GB. i.e.:
// < 8GB = 64
// >=8GB, < 16GB = 128
// >=16GB, < 32GB = 256
// >=32GB = 512
var maxLocksValues = []string{"64", "128", "256", "512"}
// MiscLabel is the label used to refer to the miscellaneous settings group
const MiscLabel = "miscellaneous"
// MiscKeys is an array of miscellaneous keys that are tunable
var MiscKeys = []string{
StatsTargetKey,
RandomPageCostKey,
CheckpointKey,
MaxConnectionsKey,
MaxLocksPerTxKey,
AutovacuumMaxWorkersKey,
AutovacuumNaptimeKey,
EffectiveIOKey,
}
// MiscRecommender gives recommendations for MiscKeys based on system resources.
type MiscRecommender struct {
totalMemory uint64
maxConns uint64
pgMajorVersion string
}
// NewMiscRecommender returns a MiscRecommender (unaffected by system resources).
func NewMiscRecommender(totalMemory, maxConns uint64, pgMajorVersion string) *MiscRecommender {
return &MiscRecommender{totalMemory, maxConns, pgMajorVersion}
}
// IsAvailable returns whether this Recommender is usable given the system resources. Always true.
func (r *MiscRecommender) IsAvailable() bool {
return true
}
// Recommend returns the recommended PostgreSQL formatted value for the conf
// file for a given key.
func (r *MiscRecommender) Recommend(key string) string {
var val string
switch key {
case CheckpointKey:
val = checkpointDefault
case StatsTargetKey:
val = statsTargetDefault
case MaxConnectionsKey:
conns := getMaxConns(r.totalMemory)
if r.maxConns != 0 {
conns = r.maxConns
}
val = fmt.Sprintf("%d", conns)
case RandomPageCostKey:
val = randomPageCostDefault
case MaxLocksPerTxKey:
for i := len(maxLocksValues) - 1; i >= 1; i-- {
limit := uint64(math.Pow(2.0, float64(2+i)))
if r.totalMemory >= limit*parse.Gigabyte {
return maxLocksValues[i]
}
}
return maxLocksValues[0]
case AutovacuumMaxWorkersKey:
val = autovacuumMaxWorkersDefault
case AutovacuumNaptimeKey:
val = autovacuumNaptimeDefault
case EffectiveIOKey:
val = getEffectiveIOConcurrency(r.pgMajorVersion)
default:
panic(fmt.Sprintf("unknown key: %s", key))
}
return val
}
// MiscSettingsGroup is the SettingsGroup to represent settings that do not fit in other SettingsGroups.
type MiscSettingsGroup struct {
totalMemory uint64
maxConns uint64
pgMajorVersion string
}
// Label should always return the value MiscLabel.
func (sg *MiscSettingsGroup) Label() string { return MiscLabel }
// Keys should always return the MiscKeys slice.
func (sg *MiscSettingsGroup) Keys() []string {
if runtime.GOOS != "linux" {
return MiscKeys[:len(MiscKeys)-1]
}
return MiscKeys
}
// GetRecommender should return a new MiscRecommender.
func (sg *MiscSettingsGroup) GetRecommender() Recommender {
return NewMiscRecommender(sg.totalMemory, sg.maxConns, sg.pgMajorVersion)
}
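// Usage sketch (within this package; hypothetical values):
//
//	sg := &MiscSettingsGroup{totalMemory: 8 * parse.Gigabyte, pgMajorVersion: pgutils.MajorVersion14}
//	r := sg.GetRecommender()
//	for _, key := range sg.Keys() {
//		fmt.Printf("%s = %s\n", key, r.Recommend(key))
//	}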
<|start_filename|>pkg/pgutils/utils.go<|end_filename|>
package pgutils
import (
"fmt"
"os/exec"
"regexp"
)
// Major version strings for recent PostgreSQL versions
const (
MajorVersion96 = "9.6"
MajorVersion10 = "10"
MajorVersion11 = "11"
MajorVersion12 = "12"
MajorVersion13 = "13"
MajorVersion14 = "14"
)
const (
defaultBinName = "pg_config"
versionFlag = "--version"
errCouldNotParseVersionFmt = "unable to parse PG version string: %s"
errUnknownMajorVersionFmt = "unknown major PG version: %s"
)
var (
// Note: the dots are escaped so they match a literal '.' rather than any character.
pgVersionRegex = regexp.MustCompile("^PostgreSQL ([0-9]+?)\\.([0-9]+?).*")
execFn = func(name string, args ...string) ([]byte, error) {
return exec.Command(name, args...).Output()
}
)
// ToPGMajorVersion returns the major PostgreSQL version associated with a given
// version string, as given from an invocation of `pg_config --version`. This
// string has the form of "PostgreSQL X.Y[.Z (extra)]". For versions before 10,
// the major version is defined as X.Y, whereas starting with 10, it is defined
// as just X. That is, "PostgreSQL 10.3" returns "10" and "PostgreSQL 9.6.4"
// returns "9.6".
func ToPGMajorVersion(val string) (string, error) {
res := pgVersionRegex.FindStringSubmatch(val)
if len(res) != 3 {
return "", fmt.Errorf(errCouldNotParseVersionFmt, val)
}
switch res[1] {
case MajorVersion10, MajorVersion11, MajorVersion12, MajorVersion13, MajorVersion14:
return res[1], nil
case "7", "8", "9":
return res[1] + "." + res[2], nil
default:
return "", fmt.Errorf(errUnknownMajorVersionFmt, val)
}
}
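// Examples (illustrative only, not part of the original source):
//
//	ToPGMajorVersion("PostgreSQL 10.3")  // "10", nil
//	ToPGMajorVersion("PostgreSQL 9.6.4") // "9.6", nil
//	ToPGMajorVersion("PostgreSQL 6.5")   // "", unknown-major-version error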
// GetPGConfigVersion executes the pg_config binary (assuming it is in PATH) to
// get the version of PostgreSQL associated with it.
func GetPGConfigVersion() (string, error) {
return GetPGConfigVersionAtPath(defaultBinName)
}
// GetPGConfigVersionAtPath executes the (pg_config) binary at path to get the
// version of PostgreSQL associated with it.
func GetPGConfigVersionAtPath(path string) (string, error) {
output, err := execFn(path, versionFlag)
if err != nil {
return "", err
}
return string(output), nil
}
| timescale/timescaledb-tune |
<|start_filename|>gedit/plugins/goto-definition-gedit3/ctags/tex.c<|end_filename|>
/*
* $Id: tex.c 666 2008-05-15 17:47:31Z dfishburn $
*
* Copyright (c) 2008, <NAME>
*
* This source code is released for free distribution under the terms of the
* GNU General Public License.
*
* This module contains functions for generating tags for TeX language files.
*
* Tex language reference:
* http://en.wikibooks.org/wiki/TeX#The_Structure_of_TeX
*/
/*
* INCLUDE FILES
*/
#include "general.h" /* must always come first */
#include <ctype.h> /* to define isalpha () */
#include <setjmp.h>
#ifdef DEBUG
#include <stdio.h>
#endif
#include "debug.h"
#include "entry.h"
#include "keyword.h"
#include "parse.h"
#include "read.h"
#include "routines.h"
#include "vstring.h"
/*
* MACROS
*/
#define isType(token,t) (boolean) ((token)->type == (t))
#define isKeyword(token,k) (boolean) ((token)->keyword == (k))
/*
* DATA DECLARATIONS
*/
typedef enum eException { ExceptionNone, ExceptionEOF } exception_t;
/*
* Used to specify type of keyword.
*/
typedef enum eKeywordId {
KEYWORD_NONE = -1,
KEYWORD_chapter,
KEYWORD_section,
KEYWORD_subsection,
KEYWORD_subsubsection,
KEYWORD_part,
KEYWORD_paragraph,
KEYWORD_subparagraph
} keywordId;
/* Used to determine whether keyword is valid for the token language and
* what its ID is.
*/
typedef struct sKeywordDesc {
const char *name;
keywordId id;
} keywordDesc;
typedef enum eTokenType {
TOKEN_UNDEFINED,
TOKEN_CHARACTER,
TOKEN_CLOSE_PAREN,
TOKEN_SEMICOLON,
TOKEN_COLON,
TOKEN_COMMA,
TOKEN_KEYWORD,
TOKEN_OPEN_PAREN,
TOKEN_OPERATOR,
TOKEN_IDENTIFIER,
TOKEN_STRING,
TOKEN_PERIOD,
TOKEN_OPEN_CURLY,
TOKEN_CLOSE_CURLY,
TOKEN_EQUAL_SIGN,
TOKEN_EXCLAMATION,
TOKEN_FORWARD_SLASH,
TOKEN_OPEN_SQUARE,
TOKEN_CLOSE_SQUARE,
TOKEN_OPEN_MXML,
TOKEN_CLOSE_MXML,
TOKEN_CLOSE_SGML,
TOKEN_LESS_THAN,
TOKEN_GREATER_THAN,
TOKEN_QUESTION_MARK,
TOKEN_STAR
} tokenType;
typedef struct sTokenInfo {
tokenType type;
keywordId keyword;
vString * string;
vString * scope;
unsigned long lineNumber;
fpos_t filePosition;
} tokenInfo;
/*
* DATA DEFINITIONS
*/
static langType Lang_js;
static jmp_buf Exception;
typedef enum {
TEXTAG_CHAPTER,
TEXTAG_SECTION,
TEXTAG_SUBSECTION,
TEXTAG_SUBSUBSECTION,
TEXTAG_PART,
TEXTAG_PARAGRAPH,
TEXTAG_SUBPARAGRAPH,
TEXTAG_COUNT
} texKind;
static kindOption TexKinds [] = {
{ TRUE, 'c', "chapter", "chapters" },
{ TRUE, 's', "section", "sections" },
{ TRUE, 'u', "subsection", "subsections" },
{ TRUE, 'b', "subsubsection", "subsubsections" },
{ TRUE, 'p', "part", "parts" },
{ TRUE, 'P', "paragraph", "paragraphs" },
{ TRUE, 'G', "subparagraph", "subparagraphs" }
};
static const keywordDesc TexKeywordTable [] = {
/* keyword keyword ID */
{ "chapter", KEYWORD_chapter },
{ "section", KEYWORD_section },
{ "subsection", KEYWORD_subsection },
{ "subsubsection", KEYWORD_subsubsection },
{ "part", KEYWORD_part },
{ "paragraph", KEYWORD_paragraph },
{ "subparagraph", KEYWORD_subparagraph }
};
/*
* FUNCTION DEFINITIONS
*/
static boolean isIdentChar (const int c)
{
return (boolean)
(isalpha (c) || isdigit (c) || c == '$' ||
c == '_' || c == '#');
}
static void buildTexKeywordHash (void)
{
const size_t count = sizeof (TexKeywordTable) /
sizeof (TexKeywordTable [0]);
size_t i;
for (i = 0 ; i < count ; ++i)
{
const keywordDesc* const p = &TexKeywordTable [i];
addKeyword (p->name, Lang_js, (int) p->id);
}
}
static tokenInfo *newToken (void)
{
tokenInfo *const token = xMalloc (1, tokenInfo);
token->type = TOKEN_UNDEFINED;
token->keyword = KEYWORD_NONE;
token->string = vStringNew ();
token->scope = vStringNew ();
token->lineNumber = getSourceLineNumber ();
token->filePosition = getInputFilePosition ();
return token;
}
static void deleteToken (tokenInfo *const token)
{
vStringDelete (token->string);
vStringDelete (token->scope);
eFree (token);
}
/*
* Tag generation functions
*/
static void makeConstTag (tokenInfo *const token, const texKind kind)
{
if (TexKinds [kind].enabled )
{
const char *const name = vStringValue (token->string);
tagEntryInfo e;
initTagEntry (&e, name);
e.lineNumber = token->lineNumber;
e.filePosition = token->filePosition;
e.kindName = TexKinds [kind].name;
e.kind = TexKinds [kind].letter;
makeTagEntry (&e);
}
}
static void makeTexTag (tokenInfo *const token, texKind kind)
{
vString * fulltag;
if (TexKinds [kind].enabled)
{
/*
* If a scope has been added to the token, change the token
* string to include the scope when making the tag.
*/
if ( vStringLength (token->scope) > 0 )
{
fulltag = vStringNew ();
vStringCopy (fulltag, token->scope);
vStringCatS (fulltag, ".");
vStringCatS (fulltag, vStringValue (token->string));
vStringTerminate (fulltag);
vStringCopy (token->string, fulltag);
vStringDelete (fulltag);
}
makeConstTag (token, kind);
}
}
/*
* Parsing functions
*/
static void parseString (vString *const string, const int delimiter)
{
boolean end = FALSE;
while (! end)
{
int c = fileGetc ();
if (c == EOF)
end = TRUE;
else if (c == '\\')
{
c = fileGetc(); /* This may be a ' or ". */
vStringPut (string, c);
}
else if (c == delimiter)
end = TRUE;
else
vStringPut (string, c);
}
vStringTerminate (string);
}
/*
* Reads an identifier beginning with "firstChar" and places it into
* "string".
*/
static void parseIdentifier (vString *const string, const int firstChar)
{
int c = firstChar;
Assert (isIdentChar (c));
do
{
vStringPut (string, c);
c = fileGetc ();
} while (isIdentChar (c));
vStringTerminate (string);
if (!isspace (c))
fileUngetc (c); /* unget non-identifier character */
}
static void readToken (tokenInfo *const token)
{
int c;
token->type = TOKEN_UNDEFINED;
token->keyword = KEYWORD_NONE;
vStringClear (token->string);
getNextChar:
do
{
c = fileGetc ();
token->lineNumber = getSourceLineNumber ();
token->filePosition = getInputFilePosition ();
}
while (c == '\t' || c == ' ' || c == '\n');
switch (c)
{
case EOF: longjmp (Exception, (int)ExceptionEOF); break;
case '(': token->type = TOKEN_OPEN_PAREN; break;
case ')': token->type = TOKEN_CLOSE_PAREN; break;
case ';': token->type = TOKEN_SEMICOLON; break;
case ',': token->type = TOKEN_COMMA; break;
case '.': token->type = TOKEN_PERIOD; break;
case ':': token->type = TOKEN_COLON; break;
case '{': token->type = TOKEN_OPEN_CURLY; break;
case '}': token->type = TOKEN_CLOSE_CURLY; break;
case '=': token->type = TOKEN_EQUAL_SIGN; break;
case '[': token->type = TOKEN_OPEN_SQUARE; break;
case ']': token->type = TOKEN_CLOSE_SQUARE; break;
case '?': token->type = TOKEN_QUESTION_MARK; break;
case '*': token->type = TOKEN_STAR; break;
case '\'':
case '"':
token->type = TOKEN_STRING;
parseString (token->string, c);
token->lineNumber = getSourceLineNumber ();
token->filePosition = getInputFilePosition ();
break;
case '\\':
/*
* All Tex tags start with a backslash.
* Check if the next character is an alpha character
* else it is not a potential tex tag.
*/
c = fileGetc ();
if (! isalpha (c))
fileUngetc (c);
else
{
parseIdentifier (token->string, c);
token->lineNumber = getSourceLineNumber ();
token->filePosition = getInputFilePosition ();
token->keyword = analyzeToken (token->string, Lang_js);
if (isKeyword (token, KEYWORD_NONE))
token->type = TOKEN_IDENTIFIER;
else
token->type = TOKEN_KEYWORD;
}
break;
case '%':
fileSkipToCharacter ('\n'); /* % are single line comments */
goto getNextChar;
break;
default:
if (! isIdentChar (c))
token->type = TOKEN_UNDEFINED;
else
{
parseIdentifier (token->string, c);
token->lineNumber = getSourceLineNumber ();
token->filePosition = getInputFilePosition ();
token->type = TOKEN_IDENTIFIER;
}
break;
}
}
static void copyToken (tokenInfo *const dest, tokenInfo *const src)
{
dest->lineNumber = src->lineNumber;
dest->filePosition = src->filePosition;
dest->type = src->type;
dest->keyword = src->keyword;
vStringCopy (dest->string, src->string);
vStringCopy (dest->scope, src->scope);
}
/*
* Scanning functions
*/
static boolean parseTag (tokenInfo *const token, texKind kind)
{
tokenInfo *const name = newToken ();
vString * fullname;
boolean useLongName = TRUE;
fullname = vStringNew ();
vStringClear (fullname);
/*
* Tex tags are of these formats:
* \keyword{any number of words}
* \keyword[short desc]{any number of words}
* \keyword*[short desc]{any number of words}
*
* When a keyword is found, loop through all words within
* the curly braces for the tag name.
*/
if (isType (token, TOKEN_KEYWORD))
{
copyToken (name, token);
readToken (token);
}
if (isType (token, TOKEN_OPEN_SQUARE))
{
useLongName = FALSE;
readToken (token);
while (! isType (token, TOKEN_CLOSE_SQUARE) )
{
if (isType (token, TOKEN_IDENTIFIER))
{
if (fullname->length > 0)
vStringCatS (fullname, " ");
vStringCatS (fullname, vStringValue (token->string));
}
readToken (token);
}
vStringTerminate (fullname);
vStringCopy (name->string, fullname);
makeTexTag (name, kind);
}
if (isType (token, TOKEN_STAR))
{
readToken (token);
}
if (isType (token, TOKEN_OPEN_CURLY))
{
readToken (token);
while (! isType (token, TOKEN_CLOSE_CURLY) )
{
if (isType (token, TOKEN_IDENTIFIER) && useLongName)
{
if (fullname->length > 0)
vStringCatS (fullname, " ");
vStringCatS (fullname, vStringValue (token->string));
}
readToken (token);
}
if (useLongName)
{
vStringTerminate (fullname);
vStringCopy (name->string, fullname);
makeTexTag (name, kind);
}
}
deleteToken (name);
vStringDelete (fullname);
return TRUE;
}
static void parseTexFile (tokenInfo *const token)
{
do
{
readToken (token);
if (isType (token, TOKEN_KEYWORD))
{
switch (token->keyword)
{
case KEYWORD_chapter:
parseTag (token, TEXTAG_CHAPTER);
break;
case KEYWORD_section:
parseTag (token, TEXTAG_SECTION);
break;
case KEYWORD_subsection:
parseTag (token, TEXTAG_SUBSECTION);
break;
case KEYWORD_subsubsection:
parseTag (token, TEXTAG_SUBSUBSECTION);
break;
case KEYWORD_part:
parseTag (token, TEXTAG_PART);
break;
case KEYWORD_paragraph:
parseTag (token, TEXTAG_PARAGRAPH);
break;
case KEYWORD_subparagraph:
parseTag (token, TEXTAG_SUBPARAGRAPH);
break;
default:
break;
}
}
} while (TRUE);
}
static void initialize (const langType language)
{
Assert (sizeof (TexKinds) / sizeof (TexKinds [0]) == TEXTAG_COUNT);
Lang_js = language;
buildTexKeywordHash ();
}
static void findTexTags (void)
{
tokenInfo *const token = newToken ();
exception_t exception;
exception = (exception_t) (setjmp (Exception));
while (exception == ExceptionNone)
parseTexFile (token);
deleteToken (token);
}
/* Create parser definition structure */
extern parserDefinition* TexParser (void)
{
static const char *const extensions [] = { "tex", NULL };
parserDefinition *const def = parserNew ("Tex");
def->extensions = extensions;
/*
* New definitions for parsing instead of regex
*/
def->kinds = TexKinds;
def->kindCount = KIND_COUNT (TexKinds);
def->parser = findTexTags;
def->initialize = initialize;
return def;
}
/* vi:set tabstop=4 shiftwidth=4 noexpandtab: */
<|start_filename|>gedit/plugins/goto-definition-gedit3/ctags/dosbatch.c<|end_filename|>
/*
* $Id$
*
* Copyright (c) 2009, <NAME>
*
* This source code is released for free distribution under the terms of the
* GNU General Public License.
*
* This module contains functions for generating tags for DOS Batch language files.
*/
/*
* INCLUDE FILES
*/
#include "general.h" /* must always come first */
#include <string.h>
#include "parse.h"
/*
* FUNCTION DEFINITIONS
*/
static void installDosBatchRegex (const langType language)
{
addTagRegex (language,
"^:([A-Za-z_0-9]+)", "\\1", "l,label,labels", NULL);
addTagRegex (language,
"set[ \t]+([A-Za-z_0-9]+)[ \t]*=", "\\1", "v,variable,variables", NULL);
}
extern parserDefinition* DosBatchParser ()
{
static const char *const extensions [] = { "bat", "cmd", NULL };
parserDefinition* const def = parserNew ("DosBatch");
def->extensions = extensions;
def->initialize = installDosBatchRegex;
def->regex = TRUE;
return def;
}
/* vi:set tabstop=4 shiftwidth=4: */
| lkumar93/dotfiles |
<|start_filename|>pkg/metrics/metrics_test.go<|end_filename|>
package metrics
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestNewMetricsRepository(t *testing.T) {
testCases := []struct {
entries []MetricEntry
expectedLen int
}{
{
expectedLen: 1,
},
{
entries: []MetricEntry{
{
Label: MetricsLabel("fake"),
Metric: &intMetric{},
},
},
expectedLen: 2,
},
}
for _, tc := range testCases {
repo := NewMetricsRepository(tc.entries...)
assert.Equal(t, len(repo), tc.expectedLen)
}
}
<|start_filename|>pkg/resolve/resolve.go<|end_filename|>
package resolve
import (
"time"
"github.com/gorift/gorift/pkg/server"
)
type Resolver interface {
Lookup(ResolveRequest) (ResolveReport, error)
}
type ResolveRequest struct {
Host server.Host
}
type ResolveReport struct {
Addresses []server.Address
LastCheck time.Time
}
<|start_filename|>pkg/discovery/discovery.go<|end_filename|>
package discovery
import (
"time"
"golang.org/x/xerrors"
"github.com/gorift/gorift/pkg/metrics"
"github.com/gorift/gorift/pkg/monitor"
"github.com/gorift/gorift/pkg/monitor/healthcheck"
"github.com/gorift/gorift/pkg/resolve"
"github.com/gorift/gorift/pkg/server"
)
type Option struct {
Interval time.Duration
Resolver resolve.Resolver
}
func (opt Option) Validate() error {
if opt.Interval <= 0 {
return xerrors.New("non-positive interval for ticker")
}
if opt.Resolver == nil {
return xerrors.New("no Resolver")
}
return nil
}
type Discovery interface {
GetMembers() []*server.Member
Shutdown()
}
func New(
host server.Host,
port server.Port,
maybeDiscoveryOption *Option,
maybeHealthcheckMonitorOption *healthcheck.Option,
metricsEntries []metrics.MetricEntry,
) Discovery {
discovery := newNopDiscovery(
host,
port,
maybeHealthcheckMonitorOption,
metricsEntries,
)
if maybeDiscoveryOption != nil {
if err := maybeDiscoveryOption.Validate(); err == nil {
discovery = newDefaultDiscovery(
host, port,
*maybeDiscoveryOption,
maybeHealthcheckMonitorOption,
metricsEntries,
)
}
}
return discovery
}
type nopDiscovery struct {
host server.Host
port server.Port
monitor *monitor.Monitor
}
func newNopDiscovery(
host server.Host,
port server.Port,
maybeHealthcheckMonitorOption *healthcheck.Option,
metricsEntries []metrics.MetricEntry,
) Discovery {
m := monitor.New(
server.Address(host),
port,
maybeHealthcheckMonitorOption,
metricsEntries,
)
return &nopDiscovery{
host: host,
port: port,
monitor: m,
}
}
func (d *nopDiscovery) GetMembers() []*server.Member {
return []*server.Member{
server.NewMember(
d.host,
server.Address(d.host),
d.port,
d.monitor.GetHealthStatus(),
d.monitor.GetMetricsRepository(),
),
}
}
func (d *nopDiscovery) Shutdown() {
d.monitor.Shutdown()
}
<|start_filename|>pkg/monitor/monitor.go<|end_filename|>
package monitor
import (
"github.com/gorift/gorift/pkg/metrics"
healthcheckmonitor "github.com/gorift/gorift/pkg/monitor/healthcheck"
"github.com/gorift/gorift/pkg/server"
)
type Monitor struct {
healthcheckMonitor healthcheckmonitor.Monitor
metricsRepository metrics.MetricsRepository
doneCh chan struct{}
}
func New(
address server.Address,
port server.Port,
maybeHealthcheckMonitorOption *healthcheckmonitor.Option,
metricsEntries []metrics.MetricEntry,
) *Monitor {
healthcheckMonitor := healthcheckmonitor.NewNopMonitor()
if maybeHealthcheckMonitorOption != nil {
if err := maybeHealthcheckMonitorOption.Validate(); err == nil {
healthcheckMonitor = healthcheckmonitor.NewDefaultMonitor(
address, port, *maybeHealthcheckMonitorOption)
}
}
return newMonitor(
healthcheckMonitor,
metricsEntries,
)
}
func newMonitor(
healthcheckMonitor healthcheckmonitor.Monitor,
metricsEntries []metrics.MetricEntry,
) *Monitor {
monitor := &Monitor{
healthcheckMonitor: healthcheckMonitor,
metricsRepository: metrics.NewMetricsRepository(metricsEntries...),
doneCh: make(chan struct{}),
}
go monitor.exec()
return monitor
}
func (m *Monitor) GetHealthStatus() server.HealthStatus {
return m.healthcheckMonitor.GetHealthStatus()
}
func (m *Monitor) GetMetricsRepository() metrics.MetricsRepository {
return m.metricsRepository
}
func (m *Monitor) exec() {
for {
select {
case <-m.doneCh:
// Stop the healthcheck monitor exactly once and exit; without
// the return, the closed channel keeps this case firing and
// Shutdown would be called repeatedly (closing an already
// closed channel panics).
m.healthcheckMonitor.Shutdown()
return
}
}
}
func (m *Monitor) Shutdown() {
close(m.doneCh)
}
<|start_filename|>pkg/server/server.go<|end_filename|>
package server
import (
"strconv"
"sync"
"time"
"golang.org/x/xerrors"
"github.com/gorift/gorift/pkg/metrics"
)
type Server struct {
Host Host
Port Port
}
type Host string
func (h Host) String() string {
return string(h)
}
type Address string
func (a Address) String() string {
return string(a)
}
type Port int
func (p Port) String() string {
return strconv.Itoa(int(p))
}
type Member struct {
Host Host
Address Address
Port Port
HealthStatus HealthStatus
mu sync.RWMutex
metricsRepository metrics.MetricsRepository
}
type HealthStatus struct {
Available bool
LastCheck time.Time
}
func NewMember(
host Host,
address Address,
port Port,
healthStatus HealthStatus,
metricsRepository metrics.MetricsRepository,
) *Member {
return &Member{
Host: host,
Address: address,
Port: port,
HealthStatus: healthStatus,
metricsRepository: metricsRepository,
}
}
func (m *Member) GetMetrics(label metrics.MetricsLabel) (interface{}, error) {
m.mu.RLock()
defer m.mu.RUnlock()
metric, ok := m.metricsRepository[label]
if !ok {
return nil, xerrors.Errorf("%s is not found in metrics repository", label)
}
return metric.Get(), nil
}
func (m *Member) AddMetrics(label metrics.MetricsLabel, val interface{}) error {
m.mu.Lock()
defer m.mu.Unlock()
metric, ok := m.metricsRepository[label]
if !ok {
return xerrors.Errorf("%s is not found in metrics repository", label)
}
return metric.Add(val)
}
<|start_filename|>examples/balancer/main.go<|end_filename|>
package main
import (
"fmt"
"os"
"time"
"github.com/kr/pretty"
"go.uber.org/zap"
"github.com/gorift/gorift/pkg/balance"
"github.com/gorift/gorift/pkg/balance/algorithm"
"github.com/gorift/gorift/pkg/balance/middleware/filter"
"github.com/gorift/gorift/pkg/healthcheck"
"github.com/gorift/gorift/pkg/metrics"
"github.com/gorift/gorift/pkg/resolve"
"github.com/gorift/gorift/pkg/server"
)
type nopResolver struct{}
func (r *nopResolver) Lookup(req resolve.ResolveRequest) (resolve.ResolveReport, error) {
return resolve.ResolveReport{
Addresses: []server.Address{
server.Address(req.Host),
},
LastCheck: time.Now(),
}, nil
}
func nopHealthcheckFn() healthcheck.HealthcheckFn {
return healthcheck.HealthcheckFn(func(req healthcheck.HealthcheckRequest) (healthcheck.HealthcheckReport, error) {
return healthcheck.HealthcheckReport{
Available: true,
LastCheck: time.Now(),
}, nil
})
}
func nopFilterFn() filter.FilterFn {
return filter.FilterFn(func(members []*server.Member) []*server.Member {
return members
})
}
const (
fakeMetricLabel metrics.MetricsLabel = "fake"
)
type fakeMetric struct {
val int
}
func (f *fakeMetric) Add(val interface{}) error {
f.val += val.(int)
return nil
}
func (f *fakeMetric) Get() interface{} {
return f.val
}
func main() {
logger, err := zap.NewProduction()
if err != nil {
fmt.Println(err)
os.Exit(1)
}
balancer, err := balance.New(
balance.WithZapLogger(logger),
balance.WithBalancerAlgorithm(algorithm.NewRandom()),
balance.WithFilterFnList(nopFilterFn()),
balance.EnableDiscovery(time.Second, &nopResolver{}),
balance.EnableHealthcheck(time.Second, nopHealthcheckFn()),
balance.AddCustomMetrics(metrics.MetricEntry{
Label: fakeMetricLabel,
Metric: &fakeMetric{},
}),
)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
balancer.Register(
server.Server{
Host: server.Host("host1"),
Port: server.Port(8080),
},
server.Server{
Host: server.Host("host2"),
Port: server.Port(8080),
},
)
for i := 0; i < 10; i++ {
member, err := balancer.Pick()
if err != nil {
fmt.Println(err)
os.Exit(1)
}
member.AddMetrics(fakeMetricLabel, 1)
}
members := balancer.GetMembers()
for _, member := range members {
fmt.Printf("%# v", pretty.Formatter(member))
}
}
<|start_filename|>pkg/errors/global/global.go<|end_filename|>
package global
import (
"go.uber.org/zap"
)
var (
errCh chan error
)
func init() {
errCh = make(chan error)
}
func SendError(err error) {
errCh <- err
}
func LogError(logger *zap.Logger) {
for err := range errCh {
logger.Error("balancer error on background", zap.Error(err))
}
}
func Close() {
close(errCh)
}
<|start_filename|>pkg/discovery/multi_discovery.go<|end_filename|>
package discovery
import (
"sync"
"github.com/gorift/gorift/pkg/server"
)
type MultiDiscovery struct {
mu sync.RWMutex
discoveries map[server.Host]Discovery
}
func NewMultiDiscovery() *MultiDiscovery {
return &MultiDiscovery{
discoveries: make(map[server.Host]Discovery),
}
}
func (d *MultiDiscovery) Register(
srv server.Server,
discovery Discovery,
) {
d.mu.Lock()
defer d.mu.Unlock()
if _, ok := d.discoveries[srv.Host]; !ok {
d.discoveries[srv.Host] = discovery
}
}
func (d *MultiDiscovery) GetMembers() []*server.Member {
d.mu.RLock()
defer d.mu.RUnlock()
var members []*server.Member
for _, discover := range d.discoveries {
members = append(members, discover.GetMembers()...)
}
return members
}
func (d *MultiDiscovery) Shutdown() {
d.mu.Lock()
defer d.mu.Unlock()
for _, v := range d.discoveries {
v.Shutdown()
}
}
<|start_filename|>pkg/discovery/default_discovery.go<|end_filename|>
package discovery
import (
"sync"
"time"
"github.com/gorift/gorift/pkg/errors/global"
"github.com/gorift/gorift/pkg/metrics"
"github.com/gorift/gorift/pkg/monitor"
"github.com/gorift/gorift/pkg/monitor/healthcheck"
"github.com/gorift/gorift/pkg/resolve"
"github.com/gorift/gorift/pkg/server"
)
type defaultDiscovery struct {
host server.Host
port server.Port
option Option
healthcheckMonitorOption *healthcheck.Option
metricsEntries []metrics.MetricEntry
mu sync.RWMutex
marks map[server.Address]bool
monitors map[server.Address]*monitor.Monitor
doneCh chan struct{}
}
func newDefaultDiscovery(
host server.Host,
port server.Port,
option Option,
maybeHealthcheckMonitorOption *healthcheck.Option,
metricsEntries []metrics.MetricEntry,
) Discovery {
marks := make(map[server.Address]bool)
monitors := make(map[server.Address]*monitor.Monitor)
// [TODO] initial status: whether it registers host for monitor or not.
hostAsAddress := server.Address(host)
marks[hostAsAddress] = true
monitors[hostAsAddress] = monitor.New(
hostAsAddress,
port,
maybeHealthcheckMonitorOption,
metricsEntries,
)
d := &defaultDiscovery{
host: host,
port: port,
option: option,
healthcheckMonitorOption: maybeHealthcheckMonitorOption,
metricsEntries: metricsEntries,
marks: marks,
monitors: monitors,
doneCh: make(chan struct{}),
}
go d.exec()
return d
}
func (d *defaultDiscovery) GetMembers() []*server.Member {
d.mu.RLock()
defer d.mu.RUnlock()
var members []*server.Member
for address, monitor := range d.monitors {
members = append(members, server.NewMember(
d.host,
address,
d.port,
monitor.GetHealthStatus(),
monitor.GetMetricsRepository(),
))
}
return members
}
func (d *defaultDiscovery) exec() {
ticker := time.NewTicker(d.option.Interval)
defer ticker.Stop()
for {
select {
case <-d.doneCh:
go d.shutdownMembers()
return
case <-ticker.C:
go d.handle()
}
}
}
func (d *defaultDiscovery) Shutdown() {
close(d.doneCh)
}
func (d *defaultDiscovery) shutdownMembers() {
d.mu.Lock()
defer d.mu.Unlock()
for _, v := range d.monitors {
v.Shutdown()
}
}
func (d *defaultDiscovery) handle() {
report, err := d.option.Resolver.Lookup(
resolve.ResolveRequest{
Host: d.host,
},
)
if err != nil {
global.SendError(err)
return
}
go d.update(report)
}
func (d *defaultDiscovery) update(report resolve.ResolveReport) {
d.mu.Lock()
defer d.mu.Unlock()
for address := range d.marks {
d.marks[address] = false
}
for _, address := range report.Addresses {
d.marks[address] = true
}
for address, marked := range d.marks {
if marked {
if _, ok := d.monitors[address]; !ok {
// marked but not yet monitored: start a monitor for it
d.monitors[address] = monitor.New(
address,
d.port,
d.healthcheckMonitorOption,
d.metricsEntries,
)
}
} else {
// not marked
delete(d.monitors, address)
}
}
}
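// Illustrative walkthrough (not part of the original source): if the
// previous resolution yielded {10.0.0.1, 10.0.0.2} and the next report
// contains {10.0.0.2, 10.0.0.3}, update drops the monitor for 10.0.0.1,
// keeps the one for 10.0.0.2, and starts a new monitor for 10.0.0.3.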
<|start_filename|>pkg/healthcheck/healthcheck.go<|end_filename|>
package healthcheck
import (
"time"
"github.com/gorift/gorift/pkg/server"
)
type HealthcheckFn func(HealthcheckRequest) (HealthcheckReport, error)
type HealthcheckRequest struct {
Address server.Address
Port server.Port
}
type HealthcheckReport struct {
Available bool
LastCheck time.Time
}
<|start_filename|>pkg/metrics/default_metrics.go<|end_filename|>
package metrics
import (
"golang.org/x/xerrors"
)
const (
TotalPickedLabel MetricsLabel = "Gorift_TotalPicked"
)
var (
TotalPickedMetric Metric = &intMetric{}
)
type intMetric struct {
val int
}
func (m *intMetric) Add(val interface{}) error {
// Convert each supported integer width explicitly; a bare val.(int)
// would panic for int8/int16/int32/int64 inputs.
var t int
switch v := val.(type) {
case int:
t = v
case int8:
t = int(v)
case int16:
t = int(v)
case int32:
t = int(v)
case int64:
t = int(v)
default:
return xerrors.Errorf("expected int, but got %v", val)
}
m.val += t
return nil
}
func (m *intMetric) Get() interface{} {
return m.val
}
<|start_filename|>pkg/monitor/healthcheck/healthcheck.go<|end_filename|>
package healthcheck
import (
"time"
"golang.org/x/xerrors"
"github.com/gorift/gorift/pkg/healthcheck"
"github.com/gorift/gorift/pkg/server"
)
type Option struct {
Interval time.Duration
Fn healthcheck.HealthcheckFn
}
func (opt Option) Validate() error {
if opt.Interval <= 0 {
return xerrors.New("non-positive interval for ticker")
}
if opt.Fn == nil {
return xerrors.New("no HealthcheckFn")
}
return nil
}
type Monitor interface {
GetHealthStatus() server.HealthStatus
Shutdown()
}
type nopMonitor struct{}
func NewNopMonitor() Monitor {
return &nopMonitor{}
}
func (m *nopMonitor) GetHealthStatus() server.HealthStatus {
return server.HealthStatus{
Available: true,
LastCheck: time.Now(),
}
}
func (m *nopMonitor) Shutdown() {}
<|start_filename|>pkg/metrics/metrics.go<|end_filename|>
package metrics
import (
"github.com/mohae/deepcopy"
)
type MetricsLabel string
type Metric interface {
Add(interface{}) error
Get() interface{}
}
type MetricEntry struct {
Label MetricsLabel
Metric Metric
}
type MetricsRepository map[MetricsLabel]Metric
func NewMetricsRepository(entries ...MetricEntry) MetricsRepository {
repository := make(MetricsRepository)
// default metrics for balancer
repository[TotalPickedLabel] = deepcopy.Copy(TotalPickedMetric).(Metric)
for _, entry := range entries {
repository[entry.Label] = deepcopy.Copy(entry.Metric).(Metric)
}
return repository
}
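// Illustrative note (not part of the original source): entries are
// deep-copied, so each repository gets independent metric state.
//
//	repo := NewMetricsRepository(MetricEntry{Label: "fake", Metric: &intMetric{}})
//	_ = repo[MetricsLabel("fake")].Add(1) // leaves other repositories untouched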
<|start_filename|>pkg/healthcheck/ping.go<|end_filename|>
package healthcheck
import (
"net"
"time"
)
func Ping(timeout time.Duration) HealthcheckFn {
return HealthcheckFn(func(req HealthcheckRequest) (HealthcheckReport, error) {
resp := HealthcheckReport{
Available: false,
LastCheck: time.Now(),
}
conn, err := net.DialTimeout("tcp", net.JoinHostPort(req.Address.String(), req.Port.String()), timeout)
if err != nil {
return resp, err
}
conn.Close()
resp.Available = true
return resp, nil
})
}
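// Illustrative wiring (not part of the original source): using Ping as
// the healthcheck function when constructing a balancer, mirroring the
// options used in examples/balancer/main.go.
//
//	b, err := balance.New(
//		balance.EnableHealthcheck(5*time.Second, healthcheck.Ping(2*time.Second)),
//	)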
<|start_filename|>pkg/errors/errors.go<|end_filename|>
package errors
import (
"bytes"
)
type MergedError []error
func (e MergedError) Error() string {
var buf bytes.Buffer
for _, err := range e {
buf.WriteString(err.Error() + ": ")
}
res := buf.String()
if buf.Len() >= 2 {
res = res[:buf.Len()-2]
}
return res
}
func (e MergedError) Len() int {
return len(e)
}
func (e *MergedError) Add(err error) {
// A pointer receiver is required here: appending through a value
// receiver would grow a copy of the slice header, silently dropping
// the error. nil errors are skipped so callers can pass Validate()
// results unconditionally.
if err == nil {
return
}
if merged, ok := err.(MergedError); ok {
*e = append(*e, merged...)
} else {
*e = append(*e, err)
}
}
<|start_filename|>pkg/errors/errors_test.go<|end_filename|>
package errors
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestMergedError(t *testing.T) {
testCases := []struct {
errs []error
expected string
}{
{
errs: []error{},
expected: "",
},
}
for _, tc := range testCases {
var merged MergedError
for _, err := range tc.errs {
merged.Add(err)
}
assert.Equal(t, tc.expected, merged.Error())
}
}
<|start_filename|>pkg/resolve/default_resolver.go<|end_filename|>
package resolve
import (
"net"
"time"
"github.com/miekg/dns"
"golang.org/x/xerrors"
"github.com/gorift/gorift/pkg/server"
)
var (
defaultResolvConfPath = "/etc/resolv.conf"
)
type DefaultResolver struct {
cfg *dns.ClientConfig
client *dns.Client
}
func NewDefaultResolver() (Resolver, error) {
cfg, err := dns.ClientConfigFromFile(defaultResolvConfPath)
if err != nil {
return nil, xerrors.Errorf("failed to create resolver: %w", err)
}
return &DefaultResolver{
cfg: cfg,
client: &dns.Client{},
}, nil
}
func (r *DefaultResolver) Lookup(req ResolveRequest) (ResolveReport, error) {
m4 := &dns.Msg{}
m4.SetQuestion(dns.Fqdn(req.Host.String()), dns.TypeA)
resp, _, err := r.client.Exchange(m4, selectServer(r.cfg))
if err != nil {
return ResolveReport{}, err
}
addresses := make([]server.Address, 0)
if resp.Rcode == dns.RcodeSuccess {
for _, ans := range resp.Answer {
// The answer section may also contain CNAME records; only
// A records carry addresses, so skip anything else instead
// of panicking on the type assertion.
record, ok := ans.(*dns.A)
if !ok {
continue
}
addresses = append(addresses, server.Address(record.A.String()))
}
}
return ResolveReport{
Addresses: addresses,
LastCheck: time.Now(),
}, nil
}
func selectServer(cfg *dns.ClientConfig) string {
return net.JoinHostPort(cfg.Servers[0], cfg.Port)
}
<|start_filename|>pkg/metrics/default_metrics_test.go<|end_filename|>
package metrics
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestUint64Metrics(t *testing.T) {
testCases := []struct {
input interface{}
isErrOnAdd bool
expectedOnGet interface{}
}{
{
input: int(1),
isErrOnAdd: false,
expectedOnGet: int(1),
},
{
input: nil,
isErrOnAdd: true,
},
{
input: "str",
isErrOnAdd: true,
},
}
for _, tc := range testCases {
metric := &intMetric{}
err := metric.Add(tc.input)
if tc.isErrOnAdd {
assert.Error(t, err)
} else {
assert.Equal(t, tc.expectedOnGet, metric.Get())
}
}
}
<|start_filename|>pkg/balance/algorithm/p2c.go<|end_filename|>
package algorithm
import (
"math/rand"
"golang.org/x/xerrors"
"github.com/gorift/gorift/pkg/metrics"
"github.com/gorift/gorift/pkg/server"
)
type p2c struct{}
func NewP2C() Algorithm {
return &p2c{}
}
func (p *p2c) Pick(members []*server.Member) (*server.Member, error) {
n := len(members)
if n < 1 {
return nil, xerrors.New("there are no members")
} else if n == 1 {
return members[0], nil
} else if n == 2 {
return pickWithTotalPicked(members[0], members[1])
} else {
m1 := members[rand.Intn(n)]
m2 := members[rand.Intn(n)]
return pickWithTotalPicked(m1, m2)
}
}
func pickWithTotalPicked(m1, m2 *server.Member) (*server.Member, error) {
m1metric, err := m1.GetMetrics(metrics.TotalPickedLabel)
if err != nil {
return nil, err
}
m2metric, err := m2.GetMetrics(metrics.TotalPickedLabel)
if err != nil {
return nil, err
}
m1load := m1metric.(int)
m2load := m2metric.(int)
res := m1
if m1load > m2load {
res = m2
}
return res, nil
}
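// Illustrative only (not part of the original source): selecting
// power-of-two-choices at balancer construction instead of the
// Random default.
//
//	b, err := balance.New(balance.WithBalancerAlgorithm(algorithm.NewP2C()))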
<|start_filename|>pkg/balance/middleware/filter/filter_test.go<|end_filename|>
package filter
import (
"reflect"
"testing"
"github.com/stretchr/testify/assert"
"github.com/gorift/gorift/pkg/server"
)
func TestAvailables(t *testing.T) {
testCases := []struct {
members []*server.Member
expected []*server.Member
}{
{
members: []*server.Member{
server.NewMember("h1", server.Address(""), server.Port(8080), server.HealthStatus{Available: true}, nil),
server.NewMember("h2", server.Address(""), server.Port(8080), server.HealthStatus{}, nil),
},
expected: []*server.Member{
server.NewMember("h1", server.Address(""), server.Port(8080), server.HealthStatus{Available: true}, nil),
},
},
{
members: []*server.Member{
server.NewMember("h1", server.Address(""), server.Port(8080), server.HealthStatus{}, nil),
server.NewMember("h2", server.Address(""), server.Port(8080), server.HealthStatus{}, nil),
},
expected: []*server.Member{},
},
{
members: []*server.Member{},
expected: []*server.Member{},
},
{
members: nil,
expected: []*server.Member{},
},
}
for _, tc := range testCases {
filtered := Availables()(tc.members)
assert.True(t, reflect.DeepEqual(tc.expected, filtered))
}
}
<|start_filename|>pkg/balance/middleware/filter/filter.go<|end_filename|>
package filter
import (
"github.com/gorift/gorift/pkg/server"
)
type FilterFn func([]*server.Member) []*server.Member
func Availables() FilterFn {
return FilterFn(func(members []*server.Member) []*server.Member {
res := make([]*server.Member, 0)
for _, v := range members {
if v.HealthStatus.Available {
res = append(res, v)
}
}
return res
})
}
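// A hypothetical extra filter (sketch, not part of the original source):
// cap the candidate set at n members. It composes with Availables()
// via balance.WithFilterFnList.
func Limit(n int) FilterFn {
return FilterFn(func(members []*server.Member) []*server.Member {
if len(members) <= n {
return members
}
return members[:n]
})
}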
<|start_filename|>pkg/monitor/healthcheck/default_healthcheck.go<|end_filename|>
package healthcheck
import (
"sync"
"time"
"github.com/gorift/gorift/pkg/errors/global"
"github.com/gorift/gorift/pkg/healthcheck"
"github.com/gorift/gorift/pkg/server"
)
type defaultMonitor struct {
address server.Address
port server.Port
option Option
mu sync.RWMutex
status *server.HealthStatus
doneCh chan struct{}
}
func NewDefaultMonitor(address server.Address, port server.Port, option Option) Monitor {
monitor := &defaultMonitor{
address: address,
port: port,
option: option,
// [TODO] initial status: Available true/false, or whether do checkFn on here.
status: &server.HealthStatus{
Available: true,
},
doneCh: make(chan struct{}),
}
go monitor.exec()
return monitor
}
func (m *defaultMonitor) GetHealthStatus() server.HealthStatus {
m.mu.RLock()
defer m.mu.RUnlock()
return *m.status
}
func (m *defaultMonitor) exec() {
ticker := time.NewTicker(m.option.Interval)
defer ticker.Stop()
for {
select {
case <-m.doneCh:
return
case <-ticker.C:
go m.handle()
}
}
}
func (m *defaultMonitor) Shutdown() {
close(m.doneCh)
}
func (m *defaultMonitor) handle() {
report, err := m.option.Fn(healthcheck.HealthcheckRequest{
Address: m.address,
Port: m.port,
})
if err != nil {
global.SendError(err)
return
}
go m.update(report)
}
func (m *defaultMonitor) update(
report healthcheck.HealthcheckReport,
) {
m.mu.Lock()
defer m.mu.Unlock()
m.status.Available = report.Available
m.status.LastCheck = report.LastCheck
}
<|start_filename|>pkg/balance/algorithm/random_test.go<|end_filename|>
package algorithm
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/gorift/gorift/pkg/server"
)
func TestRandom(t *testing.T) {
testCases := []struct {
members []*server.Member
isErr bool
}{
{
members: []*server.Member{
server.NewMember("h1", server.Address(""), server.Port(8080), server.HealthStatus{}, nil),
server.NewMember("h2", server.Address(""), server.Port(8080), server.HealthStatus{}, nil),
},
isErr: false,
},
{
members: []*server.Member{
server.NewMember("h1", server.Address(""), server.Port(8080), server.HealthStatus{}, nil),
},
isErr: false,
},
{
members: []*server.Member{},
isErr: true,
},
{
members: nil,
isErr: true,
},
}
for _, tc := range testCases {
algo := NewRandom()
_, err := algo.Pick(tc.members)
assert.Equal(t, tc.isErr, err != nil)
}
}
<|start_filename|>pkg/balance/balancer.go<|end_filename|>
package balance
import (
"time"
"go.uber.org/zap"
"github.com/gorift/gorift/pkg/balance/algorithm"
"github.com/gorift/gorift/pkg/balance/middleware/filter"
"github.com/gorift/gorift/pkg/discovery"
"github.com/gorift/gorift/pkg/errors"
"github.com/gorift/gorift/pkg/errors/global"
"github.com/gorift/gorift/pkg/healthcheck"
"github.com/gorift/gorift/pkg/metrics"
healthcheckmonitor "github.com/gorift/gorift/pkg/monitor/healthcheck"
"github.com/gorift/gorift/pkg/resolve"
"github.com/gorift/gorift/pkg/server"
)
var (
defaultLogger = zap.NewNop()
defaultAlgorithm = algorithm.NewRandom()
defaultFilterFnList = []filter.FilterFn{filter.Availables()}
)
type option struct {
logger *zap.Logger
algorithm algorithm.Algorithm
filterFnList []filter.FilterFn
maybeDiscoveryOption *discovery.Option
maybeHealthcheckMonitorOption *healthcheckmonitor.Option
metricsEntries []metrics.MetricEntry
}
type BalancerOption func(*option)
func WithZapLogger(logger *zap.Logger) BalancerOption {
return BalancerOption(func(opt *option) {
opt.logger = logger
})
}
func WithBalancerAlgorithm(algorithm algorithm.Algorithm) BalancerOption {
return BalancerOption(func(opt *option) {
opt.algorithm = algorithm
})
}
func WithFilterFnList(funcs ...filter.FilterFn) BalancerOption {
return BalancerOption(func(opt *option) {
opt.filterFnList = funcs
})
}
func EnableDiscovery(
interval time.Duration,
resolver resolve.Resolver,
) BalancerOption {
return BalancerOption(func(opt *option) {
opt.maybeDiscoveryOption = &discovery.Option{
Interval: interval,
Resolver: resolver,
}
})
}
func EnableHealthcheck(
interval time.Duration,
fn healthcheck.HealthcheckFn,
) BalancerOption {
return BalancerOption(func(opt *option) {
opt.maybeHealthcheckMonitorOption = &healthcheckmonitor.Option{
Interval: interval,
Fn: fn,
}
})
}
func AddCustomMetrics(
entries ...metrics.MetricEntry,
) BalancerOption {
return BalancerOption(func(opt *option) {
opt.metricsEntries = entries
})
}
type Balancer struct {
logger *zap.Logger
algorithm algorithm.Algorithm
filterFnList []filter.FilterFn
maybeDiscoveryOption *discovery.Option
maybeHealthcheckMonitorOption *healthcheckmonitor.Option
metricsEntries []metrics.MetricEntry
multiDiscovery *discovery.MultiDiscovery
}
func New(opts ...BalancerOption) (*Balancer, error) {
opt := option{
logger: defaultLogger,
algorithm: defaultAlgorithm,
filterFnList: defaultFilterFnList,
}
for _, fn := range opts {
fn(&opt)
}
balancer := &Balancer{
logger: opt.logger,
algorithm: opt.algorithm,
filterFnList: opt.filterFnList,
maybeDiscoveryOption: opt.maybeDiscoveryOption,
maybeHealthcheckMonitorOption: opt.maybeHealthcheckMonitorOption,
metricsEntries: opt.metricsEntries,
multiDiscovery: discovery.NewMultiDiscovery(),
}
if err := balancer.Validate(); err != nil {
return nil, err
}
go global.LogError(balancer.logger)
return balancer, nil
}
func (b *Balancer) Validate() error {
var merged errors.MergedError
if b.maybeDiscoveryOption != nil {
merged.Add(b.maybeDiscoveryOption.Validate())
}
if b.maybeHealthcheckMonitorOption != nil {
merged.Add(b.maybeHealthcheckMonitorOption.Validate())
}
if merged.Len() <= 0 {
return nil
}
return merged
}
func (b *Balancer) Register(servers ...server.Server) {
for _, srv := range servers {
d := discovery.New(
srv.Host, srv.Port,
b.maybeDiscoveryOption,
b.maybeHealthcheckMonitorOption,
b.metricsEntries,
)
b.multiDiscovery.Register(srv, d)
}
}
func (b *Balancer) GetMembers() []*server.Member {
return b.multiDiscovery.GetMembers()
}
func (b *Balancer) Pick() (*server.Member, error) {
candidate := b.multiDiscovery.GetMembers()
for _, fn := range b.filterFnList {
candidate = fn(candidate)
}
picked, err := b.algorithm.Pick(candidate)
postPick(picked, err)
return picked, err
}
func postPick(picked *server.Member, errOnPick error) {
if errOnPick == nil && picked != nil {
picked.AddMetrics(metrics.TotalPickedLabel, 1)
}
}
func (b *Balancer) Shutdown() {
b.multiDiscovery.Shutdown()
global.Close()
}
<|start_filename|>pkg/server/server_test.go<|end_filename|>
package server
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestHostString(t *testing.T) {
testCases := []struct {
host Host
expected string
}{
{
host: Host("localhost"),
expected: "localhost",
},
}
for _, tc := range testCases {
assert.Equal(t, tc.host.String(), tc.expected)
}
}
func TestAddressString(t *testing.T) {
testCases := []struct {
address Address
expected string
}{
{
address: Address("127.0.0.1"),
expected: "127.0.0.1",
},
}
for _, tc := range testCases {
assert.Equal(t, tc.address.String(), tc.expected)
}
}
func TestPortString(t *testing.T) {
testCases := []struct {
port Port
expected string
}{
{
port: Port(8080),
expected: "8080",
},
}
for _, tc := range testCases {
assert.Equal(t, tc.port.String(), tc.expected)
}
}
<|start_filename|>pkg/balance/algorithm/algorithm.go<|end_filename|>
package algorithm
import (
"github.com/gorift/gorift/pkg/server"
)
type Algorithm interface {
Pick([]*server.Member) (*server.Member, error)
}
<|start_filename|>pkg/balance/algorithm/random.go<|end_filename|>
package algorithm
import (
"math/rand"
"golang.org/x/xerrors"
"github.com/gorift/gorift/pkg/server"
)
type random struct {
}
func NewRandom() Algorithm {
return &random{}
}
func (r *random) Pick(members []*server.Member) (*server.Member, error) {
n := len(members)
if n < 1 {
return nil, xerrors.New("there are no members")
} else if n == 1 {
return members[0], nil
}
return members[rand.Intn(n)], nil
}
| gorift/gorift |
<|start_filename|>assets/js/search.js<|end_filename|>
// Credits to search implementation: https://gist.github.com/cmod/5410eae147e4318164258742dd053993
var fuse; // holds our search engine
var searchVisible = false;
var firstRun = true; // allow us to delay loading json data unless search activated
var list = document.querySelector('.search-list'); // targets the <ul>
var first = list.firstChild; // first child of search list
var last = list.lastChild; // last child of search list
var maininput = document.querySelector('.search-ui input'); // input box for search
var searchResultsHeading = document.querySelector('.search-results'); // heading shown above the results list
var noResults = document.querySelector('.no-results'); // "no results" message element
var resultsAvailable = false; // Did we get any search results?
// ==========================================
// The main keyboard event listener running the show
//
document.querySelector('.open-search').addEventListener('click', openSearch);
document.querySelector('.close-search').addEventListener('click', closeSearch);
function closeSearch() {
document.querySelector('.search-ui').classList.add("hidden");
document.activeElement.blur(); // remove focus from search box
searchVisible = false; // search not visible
searchResultsHeading.classList.add('hidden');
}
function openSearch() {
// Load json search index if first time invoking search
// Means we don't load json unless searches are going to happen; keep user payload small unless needed
if (firstRun) {
loadSearch(); // loads our json data and builds fuse.js search index
firstRun = false; // let's never do this again
}
// Toggle visibility of search box
if (!searchVisible) {
document.querySelector('.search-ui').classList.remove("hidden");
document.querySelector('.search-ui input').focus(); // put focus in input box so you can just start typing
searchVisible = true; // search visible
}
}
document.addEventListener('keydown', function (event) {
if (event.metaKey && event.which === 191) { // CMD-/ opens search (191 = '/')
openSearch()
}
// Allow ESC (27) to close search box
if (event.keyCode == 27) {
if (searchVisible) {
closeSearch(); // reuse the same teardown as the close button
}
}
// DOWN (40) arrow
if (event.keyCode == 40) {
if (searchVisible && resultsAvailable) {
console.log("down");
event.preventDefault(); // stop window from scrolling
if (document.activeElement == maininput) { first.focus(); } // if the currently focused element is the main input --> focus the first <li>
else if (document.activeElement == last) { last.focus(); } // if we're at the bottom, stay there
else { document.activeElement.parentElement.nextSibling.firstElementChild.focus(); } // otherwise select the next search result
}
}
// UP (38) arrow
if (event.keyCode == 38) {
if (searchVisible && resultsAvailable) {
event.preventDefault(); // stop window from scrolling
if (document.activeElement == maininput) { maininput.focus(); } // If we're in the input box, do nothing
else if (document.activeElement == first) { maininput.focus(); } // If we're at the first item, go to input box
else { document.activeElement.parentElement.previousSibling.firstElementChild.focus(); } // Otherwise, select the search result above the current active one
}
}
})
// ==========================================
// execute search as each character is typed
//
document.querySelector('.search-ui input').onkeyup = function (e) {
executeSearch(this.value);
}
// ==========================================
// fetch some json without jquery
//
function fetchJSONFile(path, callback) {
var httpRequest = new XMLHttpRequest();
httpRequest.onreadystatechange = function () {
if (httpRequest.readyState === 4) {
if (httpRequest.status === 200) {
var data = JSON.parse(httpRequest.responseText);
if (callback) callback(data);
}
}
};
httpRequest.open('GET', path);
httpRequest.send();
}
// ==========================================
// load our search index, only executed once
// on first call of search box (CMD-/)
//
function loadSearch() {
fetchJSONFile('/index.json', function (data) {
var options = { // fuse.js options; check fuse.js website for details
shouldSort: true,
location: 0,
distance: 100,
threshold: 0.4,
minMatchCharLength: 2,
keys: [
'title',
'permalink',
'contents'
]
};
fuse = new Fuse(data, options); // build the index from the json file
});
}
// ==========================================
// using the index we loaded on CMD-/, run
// a search query (for "term") every time a letter is typed
// in the search box
//
function executeSearch(term) {
let results = fuse.search(term); // the actual query being run using fuse.js
let searchitems = ''; // our results bucket
if (results.length === 0) { // no results based on what was typed into the input box
resultsAvailable = false;
searchitems = '';
if (term !== "") {
noResults.classList.remove('hidden')
} else {
noResults.classList.add('hidden')
}
} else { // build our html
noResults.classList.add('hidden')
if (term !== "") {
searchResultsHeading.classList.remove('hidden');
}
for (let item in results.slice(0, 5)) { // only show first 5 results
const title = '<div class="text-2xl mb-2 font-bold">' + results[item].item.title + '</div>';
const date = results[item].item.date ? '<div><em class="">' + new Date(results[item].item.date).toDateString() + '</em></div>' : '';
const contents = '<div>' + results[item].item.contents + '</div>';
searchitems = searchitems + '<li><a class="block mb-2 px-4 py-2 rounded pb-2 border-b border-gray-200 dark:border-gray-600 focus:bg-gray-100 dark:focus:bg-gray-700 focus:outline-none" href="' + results[item].item.permalink + '" tabindex="0">' + title + date + contents + '</a></li>';
}
resultsAvailable = true;
}
list.innerHTML = searchitems;
if (results.length > 0) {
first = list.firstChild.firstElementChild; // first result container — used for checking against keyboard up/down location
last = list.lastChild.firstElementChild; // last result container — used for checking against keyboard up/down location
}
}
<|start_filename|>assets/css/paginator.css<|end_filename|>
.pagination {
@apply inline-flex border rounded px-1 dark:border-gray-600;
}
.page-item {
@apply px-3 py-1 mx-0.5 my-1 cursor-pointer rounded hover:bg-gray-800 hover:text-white dark:hover:bg-gray-500 dark:hover:text-white;
}
.page-item.active {
@apply bg-gray-800 text-white dark:bg-gray-300 dark:text-gray-900;
}
.page-item.disabled {
@apply text-gray-400 hover:bg-transparent hover:text-gray-400 cursor-not-allowed;
}
<|start_filename|>layouts/_default/single.html<|end_filename|>
{{ define "main" }}
{{ $lastmodstr := (partial "date.html" (dict "date" .Lastmod "language" $.Page.Language "format" "long")) }}
{{ $datestr := (partial "date.html" (dict "date" .Date "language" $.Page.Language "format" "long")) }}
{{ if .Params.thumbnail }}
<div class="relative max-w-5xl mx-auto px-4">
<img src="{{ .Params.thumbnail }}" class="rounded-lg shadow-sm w-full object-contain" />
<div class="absolute top-4 right-8 rounded shadow bg-white text-gray-900 dark:bg-gray-900 dark:text-white px-2 py-0.5">
{{ $datestr }}
</div>
</div>
{{ end }}
<article class="prose lg:prose-lg mx-auto my-8 dark:prose-dark px-4">
<h1 class="text-2xl font-bold mb-2">{{ .Title }}</h1>
<h5 class="text-sm flex items-center">
<svg xmlns="http://www.w3.org/2000/svg" class="mr-1" width="16" height="16" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
<rect x="4" y="5" width="16" height="16" rx="2" />
<line x1="16" y1="3" x2="16" y2="7" />
<line x1="8" y1="3" x2="8" y2="7" />
<line x1="4" y1="11" x2="20" y2="11" />
<rect x="8" y="15" width="2" height="2" />
</svg>
{{ $datestr | i18n "postedOnDate" }}
{{ if ne $datestr $lastmodstr }}
 {{ $lastmodstr | i18n "lastModified" }}</h5><h5 class="text-sm flex items-center">
{{ else }}
•
{{ end }}
<svg xmlns="http://www.w3.org/2000/svg" class="mr-1" width="16" height="16" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
<circle cx="12" cy="12" r="9" />
<polyline points="12 7 12 12 15 15" />
</svg>
{{ i18n "readingTime"}}{{ .ReadingTime }} {{ i18n "readTime" }}
•
<svg xmlns="http://www.w3.org/2000/svg" class="mx-1" width="16" height="16" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
<path d="M3 19a9 9 0 0 1 9 0a9 9 0 0 1 9 0" />
<path d="M3 6a9 9 0 0 1 9 0a9 9 0 0 1 9 0" />
<line x1="3" y1="6" x2="3" y2="19" />
<line x1="12" y1="6" x2="12" y2="19" />
<line x1="21" y1="6" x2="21" y2="19" />
</svg>
{{ .WordCount }} {{ i18n "words" }}
{{ if .IsTranslated -}}
{{- $sortedTranslations := sort .Translations "Site.Language.Weight" -}}
{{- $links := apply $sortedTranslations "partial" "translation_link.html" "." -}}
{{- $cleanLinks := apply $links "chomp" "." -}}
{{- $linksOutput := delimit $cleanLinks (i18n "translationsSeparator") -}}
• {{ i18n "translationsLabel" }} {{ $linksOutput }}
{{- end }}
</h5>
{{ .Content }}
</article>
{{- partial "social.html" . -}}
{{ end }}
<|start_filename|>layouts/partials/date.html<|end_filename|>
{{ if eq "de" .language.Lang }}
{{ if eq "long" .format }}
{{ .date.Format "02" }}. {{ i18n (.date.Format "January")}} {{ .date.Format "2006"}}
{{ else }}
{{ .date.Format "02" }}. {{ substr (i18n (.date.Format "January")) 0 3 }} {{ .date.Format "06"}}
{{ end }}
{{ else }}
{{ if eq "long" .format }}
{{ .date.Format "January 2, 2006" }}
{{ else }}
{{ .date.Format "Jan 2, 06" }}
{{ end }}
{{ end }}
| yuuyins/blist-hugo-theme |
<|start_filename|>request_writer.go<|end_filename|>
package quictun
import (
"bytes"
"encoding/base64"
"fmt"
"log"
"net"
"net/http"
"strconv"
"strings"
"golang.org/x/net/http2"
"golang.org/x/net/http2/hpack"
"golang.org/x/net/idna"
"golang.org/x/net/lex/httplex"
quic "github.com/lucas-clemente/quic-go"
)
// http://www.ietf.org/rfc/rfc2617.txt
func basicAuth(username, password string) string {
auth := username + ":" + password
return base64.StdEncoding.EncodeToString([]byte(auth))
}
// rest is mostly from http2.Transport
// authorityAddr returns a given authority (a host/IP, or host:port / ip:port)
// and returns a host:port. The port 443 is added if needed.
func authorityAddr(host, port string) (addr string) {
if port == "" {
port = "443"
}
if a, err := idna.ToASCII(host); err == nil {
host = a
}
// IPv6 address literal, without a port:
if strings.HasPrefix(host, "[") && strings.HasSuffix(host, "]") {
return host + ":" + port
}
return net.JoinHostPort(host, port)
}
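// Examples (illustrative only, not part of the original source):
//
//	authorityAddr("example.com", "") // "example.com:443"
//	authorityAddr("[::1]", "8443")   // "[::1]:8443"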
// shouldSendReqContentLength reports whether the http2.Transport should send
// a "content-length" request header. This logic is basically a copy of the net/http
// transferWriter.shouldSendContentLength.
// The contentLength is the corrected contentLength (so 0 means actually 0, not unknown).
// -1 means unknown.
func shouldSendReqContentLength(method string, contentLength int64) bool {
if contentLength > 0 {
return true
}
if contentLength < 0 {
return false
}
// For zero bodies, whether we send a content-length depends on the method.
// It also kinda doesn't matter for http2 either way, with END_STREAM.
switch method {
case "POST", "PUT", "PATCH":
return true
default:
return false
}
}
func validPseudoPath(v string) bool {
return (len(v) > 0 && v[0] == '/' && (len(v) == 1 || v[1] != '/')) || v == "*"
}
// actualContentLength returns a sanitized version of
// req.ContentLength, where 0 actually means zero (not unknown) and -1
// means unknown.
func actualContentLength(req *http.Request) int64 {
if req.Body == nil {
return 0
}
if req.ContentLength != 0 {
return req.ContentLength
}
return -1
}
type requestWriter struct {
headerStream quic.Stream
henc *hpack.Encoder
hbuf bytes.Buffer // HPACK encoder writes into this
}
func newRequestWriter(headerStream quic.Stream) *requestWriter {
rw := &requestWriter{
headerStream: headerStream,
}
rw.henc = hpack.NewEncoder(&rw.hbuf)
return rw
}
func (rw *requestWriter) WriteRequest(req *http.Request, dataStreamID quic.StreamID, endStream bool) error {
if u := req.URL.User; u != nil && req.Header.Get("Authorization") == "" {
username := u.Username()
password, _ := u.Password()
req.Header.Set("Authorization", "Basic "+basicAuth(username, password))
}
buf, err := rw.encodeHeaders(req, actualContentLength(req))
if err != nil {
log.Fatal("Failed to encode request headers: ", err)
return err
}
h2framer := http2.NewFramer(rw.headerStream, nil)
return h2framer.WriteHeaders(http2.HeadersFrameParam{
StreamID: uint32(dataStreamID),
EndHeaders: true,
EndStream: endStream,
BlockFragment: buf,
Priority: http2.PriorityParam{Weight: 0xff},
})
}
func (w *requestWriter) encodeHeaders(req *http.Request, contentLength int64) ([]byte, error) {
w.hbuf.Reset()
host := req.Host
if host == "" {
host = req.URL.Host
}
host, err := httplex.PunycodeHostPort(host)
if err != nil {
return nil, err
}
path := req.URL.RequestURI()
if !validPseudoPath(path) {
orig := path
path = strings.TrimPrefix(path, req.URL.Scheme+"://"+host)
if !validPseudoPath(path) {
if req.URL.Opaque != "" {
return nil, fmt.Errorf("invalid request :path %q from URL.Opaque = %q", orig, req.URL.Opaque)
} else {
return nil, fmt.Errorf("invalid request :path %q", orig)
}
}
}
// Check for any invalid headers and return an error before we
// potentially pollute our hpack state. (We want to be able to
// continue to reuse the hpack encoder for future requests)
for k, vv := range req.Header {
if !httplex.ValidHeaderFieldName(k) {
return nil, fmt.Errorf("invalid HTTP header name %q", k)
}
for _, v := range vv {
if !httplex.ValidHeaderFieldValue(v) {
return nil, fmt.Errorf("invalid HTTP header value %q for header %q", v, k)
}
}
}
// 8.1.2.3 Request Pseudo-Header Fields
// The :path pseudo-header field includes the path and query parts of the
// target URI (the path-absolute production and optionally a '?' character
// followed by the query production (see Sections 3.3 and 3.4 of
// [RFC3986]).
w.writeHeader(":authority", host)
w.writeHeader(":method", req.Method)
w.writeHeader(":path", path)
w.writeHeader(":scheme", req.URL.Scheme)
var didUA bool
for k, vv := range req.Header {
lowKey := strings.ToLower(k)
switch lowKey {
case "host", "content-length":
// Host is :authority, already sent.
// Content-Length is automatic, set below.
continue
case "connection", "proxy-connection", "transfer-encoding", "upgrade", "keep-alive":
// Per 8.1.2.2 Connection-Specific Header
// Fields, don't send connection-specific
// fields. We have already checked if any
// are error-worthy so just ignore the rest.
continue
case "user-agent":
// Match Go's http1 behavior: at most one
// User-Agent. If set to nil or empty string,
// then omit it. Otherwise if not mentioned,
// include the default (below).
didUA = true
if len(vv) < 1 {
continue
}
vv = vv[:1]
if vv[0] == "" {
continue
}
}
for _, v := range vv {
w.writeHeader(lowKey, v)
}
}
if shouldSendReqContentLength(req.Method, contentLength) {
w.writeHeader("content-length", strconv.FormatInt(contentLength, 10))
}
if !didUA {
panic("user agent info is missing")
}
return w.hbuf.Bytes(), nil
}
func (w *requestWriter) writeHeader(name, value string) {
//fmt.Printf("http2: Transport encoding header %q = %q\n", name, value)
w.henc.WriteField(hpack.HeaderField{Name: name, Value: value})
}
<|start_filename|>client.go<|end_filename|>
package quictun
import (
"bufio"
"crypto/tls"
"errors"
"fmt"
"log"
"math/rand"
"net"
"net/http"
"net/url"
"time"
"github.com/julienschmidt/quictun/internal/atomic"
"github.com/julienschmidt/quictun/internal/socks"
"golang.org/x/net/http2"
"golang.org/x/net/http2/hpack"
quic "github.com/lucas-clemente/quic-go"
)
const protocolIdentifier = "QTP/0.1"
var (
ErrInvalidResponse = errors.New("server returned an invalid response")
ErrInvalidSequence = errors.New("client sequence number invalid")
ErrNotAQuictunServer = errors.New("server does not seem to be a quictun server")
ErrWrongCredentials = errors.New("authentication credentials seem to be wrong")
)
// Client holds the configuration and state of a quictun client
type Client struct {
// config
ListenAddr string
TunnelAddr string
UserAgent string
TlsCfg *tls.Config
QuicConfig *quic.Config
DialTimeout time.Duration
// state
session quic.Session
connected atomic.Bool
// replay protection
clientID uint64
sequenceNumber uint32
// header
headerStream quic.Stream
hDecoder *hpack.Decoder
h2framer *http2.Framer
}
func (c *Client) generateClientID() {
// generate clientID
rand.Seed(time.Now().UnixNano())
c.clientID = rand.Uint64()
}
func (c *Client) connect() error {
authURL := c.TunnelAddr
// extract hostname from auth url
uri, err := url.ParseRequestURI(authURL)
if err != nil {
log.Println("invalid auth URL:", err)
return err
}
hostname := authorityAddr(uri.Hostname(), uri.Port())
fmt.Println("Connecting to", hostname)
c.session, err = quic.DialAddr(hostname, c.TlsCfg, c.QuicConfig)
if err != nil {
log.Println("dial error:", err)
return err
}
// once the version has been negotiated, open the header stream
c.headerStream, err = c.session.OpenStream()
if err != nil {
log.Println("OpenStream error:", err)
return err
}
//fmt.Println("Header StreamID:", c.headerStream.StreamID())
dataStream, err := c.session.OpenStreamSync()
if err != nil {
log.Println("OpenStreamSync error:", err)
return err
}
//fmt.Println("Data StreamID:", dataStream.StreamID())
// build HTTP request
// The authorization credentials are automatically encoded from the URL
req, err := http.NewRequest("GET", authURL, nil)
if err != nil {
log.Println("NewRequest error:", err)
return err
}
req.Header.Set("User-Agent", c.UserAgent)
// request protocol upgrade
req.Header.Set("Connection", "Upgrade")
req.Header.Set("Upgrade", protocolIdentifier)
// replay protection
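// the QTP header carries the 64-bit client ID and the 32-bit sequence
// number as 24 hex digits, e.g. "0123456789ABCDEF00000001"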
c.sequenceNumber++
req.Header.Set("QTP", fmt.Sprintf("%016X%08X", c.clientID, c.sequenceNumber))
rw := newRequestWriter(c.headerStream)
endStream := true //endStream := !hasBody
fmt.Println("requesting", authURL)
err = rw.WriteRequest(req, dataStream.StreamID(), endStream)
if err != nil {
log.Println("WriteRequest error:", err)
return err
}
fmt.Println("Waiting...")
// read frames from headerStream
c.h2framer = http2.NewFramer(nil, c.headerStream)
c.hDecoder = hpack.NewDecoder(4096, func(hf hpack.HeaderField) {})
frame, err := c.h2framer.ReadFrame()
if err != nil {
// c.headerErr = qerr.Error(qerr.HeadersStreamDataDecompressFailure, "cannot read frame")
log.Println("cannot read frame:", err)
return err
}
hframe, ok := frame.(*http2.HeadersFrame)
if !ok {
// c.headerErr = qerr.Error(qerr.InvalidHeadersStreamData, "not a headers frame")
log.Println("not a headers frame")
return ErrInvalidResponse
}
mhframe := &http2.MetaHeadersFrame{HeadersFrame: hframe}
mhframe.Fields, err = c.hDecoder.DecodeFull(hframe.HeaderBlockFragment())
if err != nil {
// c.headerErr = qerr.Error(qerr.InvalidHeadersStreamData, "cannot read header fields")
log.Println("cannot read header fields:", err)
return err
}
//fmt.Println("Frame for StreamID:", hframe.StreamID)
rsp, err := responseFromHeaders(mhframe)
if err != nil {
log.Println("responseFromHeaders:", err)
return err
}
switch rsp.StatusCode {
case http.StatusSwitchingProtocols:
header := rsp.Header
if header.Get("Connection") != "Upgrade" {
return ErrInvalidResponse
}
if header.Get("Upgrade") != protocolIdentifier {
return ErrNotAQuictunServer
}
return nil
case http.StatusUnauthorized, http.StatusForbidden:
return ErrWrongCredentials
case http.StatusBadRequest:
c.generateClientID()
return ErrInvalidSequence
default:
return ErrInvalidResponse
}
}
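// watchCancel blocks until the QUIC session's context is canceled and
// then marks the client as disconnected.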
func (c *Client) watchCancel() {
session := c.session
if session == nil {
fmt.Println("session is nil")
return
}
ctx := session.Context()
if ctx == nil {
fmt.Println("ctx is nil")
return
}
// TODO: add graceful shutdown channel
<-ctx.Done()
fmt.Println("session closed", ctx.Err())
c.connected.Set(false)
}
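// tunnelConn performs the SOCKS handshake on the incoming local
// connection and then proxies its payload through a new QUIC stream.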
func (c *Client) tunnelConn(local net.Conn) {
local.(*net.TCPConn).SetKeepAlive(true)
// TODO: SetReadTimeout(conn)
localRd := bufio.NewReader(local)
// initiate SOCKS connection
if err := socks.Auth(localRd, local); err != nil {
fmt.Println(err)
local.Close()
return
}
req, err := socks.PeekRequest(localRd)
if err != nil {
fmt.Println(err)
socks.SendReply(local, socks.StatusConnectionRefused, nil)
local.Close()
return
}
fmt.Println("request", req.Dest())
switch req.Cmd() {
case socks.CmdConnect:
fmt.Println("[Connect]")
if err = socks.SendReply(local, socks.StatusSucceeded, nil); err != nil {
fmt.Println(err)
local.Close()
return
}
default:
socks.SendReply(local, socks.StatusCmdNotSupported, nil)
local.Close()
return
}
// TODO: check connected status again and reconnect if necessary
stream, err := c.session.OpenStreamSync()
if err != nil {
fmt.Println("open stream err", err)
local.Close()
return
}
fmt.Println("Start proxying...")
go proxy(local, stream) // recv from stream and send to local
proxy(stream, localRd) // recv from local and send to stream
}
// close closes the client's QUIC session, if one is open
func (c *Client) close(err error) error {
if c.session == nil {
return nil
}
return c.session.Close(err)
}
// Run starts the client, accepting incoming SOCKS connections and
// tunneling them to the configured quictun server.
// The tunnel connection is opened only on demand.
func (c *Client) Run() error {
c.generateClientID()
listener, err := net.Listen("tcp", c.ListenAddr)
if err != nil {
return fmt.Errorf("Failed to listen on %s: %s", c.ListenAddr, err)
}
fmt.Println("Listening for incoming SOCKS connection...")
// accept local connections and tunnel them
for {
conn, err := listener.Accept()
if err != nil {
log.Println("Accept Err:", err)
continue
}
fmt.Println("new SOCKS conn", conn.RemoteAddr().String())
if !c.connected.IsSet() {
err = c.connect()
if err != nil {
fmt.Println("Failed to connect to tunnel host:", err)
conn.Close()
continue
}
// start watcher which closes when canceled
go c.watchCancel()
c.connected.Set(true)
}
go c.tunnelConn(conn)
}
}
<|start_filename|>cmd/quictun_server/server.go<|end_filename|>
package main
import (
"flag"
"fmt"
"net/http"
"time"
"github.com/julienschmidt/quictun"
"github.com/julienschmidt/quictun/h2quic"
"github.com/julienschmidt/quictun/internal/lru"
"github.com/julienschmidt/quictun/internal/testdata"
)
const (
dialTimeout = 30
)
func main() {
// command-line args
listenFlag := flag.String("l", "localhost:6121", "QUIC listen address")
flag.Parse()
args := flag.Args()
if len(args) > 0 {
flag.Usage()
return
}
listenAddr := *listenFlag
quictunServer := quictun.Server{
DialTimeout: dialTimeout * time.Second,
SequenceCache: lru.New(10),
}
// Register the upgrade handler for the quictun protocol
h2quic.RegisterUpgradeHandler("QTP/0.1", quictunServer.Upgrade)
http.HandleFunc("/secret", func(w http.ResponseWriter, r *http.Request) {
// replay protection
if !quictunServer.CheckSequenceNumber(r.Header.Get("QTP")) {
w.Header().Set("Connection", "close")
w.WriteHeader(http.StatusBadRequest)
r.Close = true
return
}
// switch to quictun protocol (version 0.1)
w.Header().Set("Connection", "Upgrade")
w.Header().Set("Upgrade", "QTP/0.1")
w.WriteHeader(http.StatusSwitchingProtocols)
})
// HTTP server
// Implementations for production usage should be embedded in an existing web server instead.
server := h2quic.Server{
Server: &http.Server{Addr: listenAddr},
}
certFile, keyFile := testdata.GetCertificatePaths()
fmt.Printf("Start listening on %s...\n", listenAddr)
err := server.ListenAndServeTLS(certFile, keyFile)
if err != nil {
fmt.Println(err)
}
}
<|start_filename|>h2quic/doc.go<|end_filename|>
// Package h2quic is a drop-in replacement for quic-go's h2quic package with
// integrated quictun support.
package h2quic
<|start_filename|>proxy.go<|end_filename|>
package quictun
import (
"io"
)
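// proxy copies all data from src to dst, then closes dst so the peer
// sees EOF once the source side is done.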
func proxy(dst io.WriteCloser, src io.Reader) {
io.Copy(dst, src)
//src.Close()
dst.Close()
//fmt.Println("done proxying")
}
<|start_filename|>internal/socks/socks.go<|end_filename|>
package socks
import (
"bufio"
"errors"
"io"
"net"
"strconv"
)
// See https://www.ietf.org/rfc/rfc1928.txt
const socksVersion = 5
// Commands
const (
CmdConnect = 1
CmdBind = 2
CmdAssociate = 3
)
// Address types
const (
AtypIPv4 = 1
AtypDomain = 3
AtypIPv6 = 4
)
// Auth Methods
const (
AuthNoAuthenticationRequired = 0x00
AuthNoAcceptableMethod = 0xFF
)
// Status
const (
StatusSucceeded = 0
StatusGeneralFailure = 1
StatusConnectionNotAllowed = 2
StatusNetworkUnreachable = 3
StatusHostUnreachable = 4
StatusConnectionRefused = 5
StatusTtlExpired = 6
StatusCmdNotSupported = 7
StatusAddrNotSupported = 8
)
// Errors
var (
ErrNoAuth = errors.New("could not authenticate SOCKS connection")
ErrAtypNotSupported = errors.New("address type is not supported")
)
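// Auth performs the SOCKS5 method-selection handshake: it reads the
// client's greeting from rd and replies on w. Only the "no
// authentication required" method is accepted.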
func Auth(rd *bufio.Reader, w io.Writer) error {
// 1 version
// 1 nmethods
// 1 method[nmethods] (we only read 1 at a time)
var header [3]byte
if _, err := io.ReadFull(rd, header[:]); err != nil {
return err
}
// check SOCKS version
if clVersion := header[0]; clVersion != socksVersion {
return errors.New("incompatible SOCKS version: " +
strconv.FormatUint(uint64(clVersion), 10))
}
// check auth
// currently only NoAuthenticationRequired is supported
acceptableAuth := false
if nMethods := header[1]; nMethods > 0 {
if method := header[2]; method == AuthNoAuthenticationRequired {
acceptableAuth = true
}
for n := uint8(1); n < nMethods; n++ {
// if we already have an acceptable auth method, we can skip the rest
if acceptableAuth {
if _, err := rd.Discard(int(nMethods - n)); err != nil {
return err
}
break
}
// keep checking until we find an acceptable auth method
method, err := rd.ReadByte()
if err != nil {
return err
}
if method == AuthNoAuthenticationRequired {
acceptableAuth = true
}
}
}
// send auth method selection to client
if !acceptableAuth {
w.Write([]byte{socksVersion, AuthNoAcceptableMethod})
return ErrNoAuth
}
_, err := w.Write([]byte{socksVersion, AuthNoAuthenticationRequired})
return err
}
type Request []byte
// PeekRequest peeks at the next SOCKS request in the reader without
// consuming it, so the raw bytes can later be forwarded or discarded.
func PeekRequest(rd *bufio.Reader) (Request, error) {
// 1 version
// 1 command
// 1 reserved
// 1 atyp
header, err := rd.Peek(4)
if err != nil {
return nil, err
}
// check SOCKS version
if clVersion := header[0]; clVersion != socksVersion {
return nil, errors.New("incompatible SOCKS version: " +
strconv.FormatUint(uint64(clVersion), 10))
}
// read address (IPv4, IPv6 or Domain)
const addrStart = 4
atyp := header[3]
switch atyp {
case AtypIPv4:
// read IPv4 address + port
buf, err := rd.Peek(addrStart + net.IPv4len + 2)
return Request(buf), err
case AtypDomain:
header, err = rd.Peek(addrStart + 1)
if err != nil {
return nil, err
}
domainLen := int(header[4])
// read domain name + port
buf, err := rd.Peek(addrStart + 1 + domainLen + 2)
return Request(buf), err
case AtypIPv6:
// read IPv6 address + port
buf, err := rd.Peek(addrStart + net.IPv6len + 2)
return Request(buf), err
default:
return nil, ErrAtypNotSupported
}
}
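// Cmd returns the SOCKS command byte of the request.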
func (r Request) Cmd() byte {
return r[1]
}
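// Dest returns the destination address (type, host and port) of the request.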
func (r Request) Dest() Addr {
return Addr(r[3:])
}
// Addr is a pair of IPv4, IPv6 or Domain and a port
type Addr []byte
// Type returns the address type
func (a Addr) Type() byte {
return a[0]
}
// Port returns the port of the address
func (a Addr) Port() int {
var i = len(a) - 2
return (int(a[i]) << 8) | int(a[i+1])
}
// String formats the address as a host:port string
func (a Addr) String() string {
var host string
switch a.Type() {
case AtypIPv4, AtypIPv6:
host = (net.IP(a[1 : len(a)-2])).String()
case AtypDomain:
host = string(a[2 : len(a)-2])
default:
return ""
}
return net.JoinHostPort(host, strconv.Itoa(a.Port()))
}
// TODO: allow to pass buffer or writer
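// NewIPAddr builds an Addr from an IP address (IPv4 or IPv6) and a port.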
func NewIPAddr(ip net.IP, port int) Addr {
port1 := byte(port >> 8)
port2 := byte(port & 0xff)
if ip4 := ip.To4(); ip4 != nil {
return Addr{AtypIPv4,
ip4[0], ip4[1], ip4[2], ip4[3],
port1, port2,
}
}
if ip16 := ip.To16(); ip16 != nil {
return Addr{AtypIPv6,
ip16[0], ip16[1], ip16[2], ip16[3],
ip16[4], ip16[5], ip16[6], ip16[7],
ip16[8], ip16[9], ip16[10], ip16[11],
ip16[12], ip16[13], ip16[14], ip16[15],
port1, port2,
}
}
return nil
}
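// SendReply writes a SOCKS5 reply with the given status and bind address
// to wr. If addr is nil, an all-zero IPv4 address and port are sent.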
func SendReply(wr io.Writer, status byte, addr Addr) error {
// buffer to avoid allocations in the common cases
var buf [64]byte
reply := buf[:]
if len(addr)+3 > cap(buf) {
reply = make([]byte, len(addr)+3)
}
// 1 ver
reply[0] = socksVersion
// 1 rep
reply[1] = status
// 1 reserved
if addr == nil {
reply = reply[:4+net.IPv4len+2]
// reply[3] = AtypDomain
// reply[4] = 0
// 1 address type
reply[3] = AtypIPv4
// 4 IPv4
reply[4] = 0
reply[5] = 0
reply[6] = 0
reply[7] = 0
// 2 port
reply[8] = 0
reply[9] = 0
} else {
reply = reply[:3+len(addr)]
copy(reply[3:], addr)
}
// write reply
_, err := wr.Write(reply)
return err
}
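// HandleRequest is currently a no-op stub.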
func HandleRequest(req *Request) {
}
<|start_filename|>internal/lru/lru_test.go<|end_filename|>
package lru
import (
"fmt"
"testing"
)
func printList(l *LRU) {
cur := l.head
i := 0
for cur != nil {
fmt.Println(i, cur.key, cur.value)
cur = cur.next
i++
}
}
func TestLRU(t *testing.T) {
lru := New(2)
if n := len(lru.cache); n != 0 {
t.Fatalf("cache should be empty, actually has %d elements", n)
}
// insert first value
old := lru.Set(1337, 42)
if old != 0 {
t.Fatalf("old value for new key is %d, should be 0", old)
}
if n := len(lru.cache); n != 1 {
t.Fatalf("cache should have 1 element, actually has %d elements", n)
}
// overwrite existing value
old = lru.Set(1337, 43)
if old != 42 {
t.Fatalf("old value for existing key is %d, should be 42", old)
}
if n := len(lru.cache); n != 1 {
t.Fatalf("cache should have 1 element, actually has %d elements", n)
}
// insert second value
old = lru.Set(1338, 42)
if old != 0 {
t.Fatalf("old value for new key is %d, should be 0", old)
}
if n := len(lru.cache); n != 2 {
t.Fatalf("cache should have 2 elements, actually has %d elements", n)
}
if head := lru.head.key; head != 1338 {
t.Fatalf("newly inserted element is not head, key of head is %d", head)
}
// access the older value
if v1 := lru.Get(1337); v1 != 43 {
t.Fatalf("value of the first entry changed, should be 43, is %d", v1)
}
if head := lru.head.key; head != 1337 {
t.Fatalf("accessed element is not head, key of head is %d", head)
}
// overwrite existing value
old = lru.Set(1337, 42)
if old != 43 {
t.Fatalf("old value for existing key is %d, should be 43", old)
}
if n := len(lru.cache); n != 2 {
t.Fatalf("cache should have 2 elements, actually has %d elements", n)
}
//printList(lru)
// insert third value, removing the second
old = lru.Set(1339, 7)
if old != 0 {
t.Fatalf("old value for new key is %d, should be 0", old)
}
if n := len(lru.cache); n != 2 {
t.Fatalf("cache should have 2 elements, actually has %d elements", n)
}
if head := lru.head.key; head != 1339 {
t.Fatalf("newly inserted element is not head, key of head is %d", head)
}
//printList(lru)
}
<|start_filename|>doc.go<|end_filename|>
// Package quictun implements the quictun tunneling protocol. The package
// can be used to implement quictun clients and servers.
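//
// A minimal client setup might look like this (sketch; field values are
// illustrative, see cmd/quictun_client for a complete example):
//
//	c := quictun.Client{
//		ListenAddr:  "localhost:1080",
//		TunnelAddr:  "https://example.org/secret",
//		UserAgent:   "...",
//		DialTimeout: 30 * time.Second,
//		TlsCfg:      &tls.Config{},
//	}
//	log.Fatal(c.Run())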
package quictun
<|start_filename|>h2quic/upgrade.go<|end_filename|>
package h2quic
import (
"errors"
quic "github.com/lucas-clemente/quic-go"
)
var noKnownUpgradeProtocol = errors.New("no known upgrade protocol")
// connectionUpgrade indicates that the connection has been upgraded to the
// protocol set within.
type connectionUpgrade struct {
protocol string
}
func (c *connectionUpgrade) Error() string {
return "connection has been upgraded to " + c.protocol
}
// UpgradeHandler is a function which can perform an upgrade to another protocol
// by modifying a given QUIC session.
type UpgradeHandler func(quic.Session)
// map of registered UpgradeHandlers
var upgradeHandlers = map[string]UpgradeHandler{}
// RegisterUpgradeHandler registers a handler function for the given protocol
// identifier, such as "PROT/1.2".
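//
// A quictun server registers its upgrade function like this (see
// cmd/quictun_server):
//
//	h2quic.RegisterUpgradeHandler("QTP/0.1", quictunServer.Upgrade)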
func RegisterUpgradeHandler(protocol string, handler UpgradeHandler) {
upgradeHandlers[protocol] = handler
}
<|start_filename|>internal/atomic/atomic.go<|end_filename|>
package atomic
import (
"sync/atomic"
)
// noCopy may be embedded into structs which must not be copied
// after the first use.
//
// See https://github.com/golang/go/issues/8005#issuecomment-190753527
// for details.
type noCopy struct{}
// Lock is a no-op used by -copylocks checker from `go vet`.
func (*noCopy) Lock() {}
// Bool is a wrapper around uint32 for usage as a boolean value with
// atomic access.
type Bool struct {
_noCopy noCopy
value uint32
}
// IsSet returns whether the current boolean value is true
func (b *Bool) IsSet() bool {
return atomic.LoadUint32(&b.value) > 0
}
// Set sets the value of the bool regardless of the previous value
func (b *Bool) Set(value bool) {
if value {
atomic.StoreUint32(&b.value, 1)
} else {
atomic.StoreUint32(&b.value, 0)
}
}
// TrySet sets the value of the bool and returns whether the value has changed
func (b *Bool) TrySet(value bool) bool {
if value {
return atomic.SwapUint32(&b.value, 1) == 0
}
return atomic.SwapUint32(&b.value, 0) > 0
}
<|start_filename|>cmd/quictun_client/client.go<|end_filename|>
package main
import (
"crypto/tls"
"flag"
"fmt"
"log"
"os"
"time"
"github.com/julienschmidt/quictun"
)
const (
// the User-Agent string is not observable, but should have the same length as a regular browser UA, e.g. that of Chrome
userAgent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.108 X-quictun/0.1"
// timeout for establishing connections to quictun server (in seconds)
dialTimeout = 30
)
func main() {
// command-line flags and args
listenFlag := flag.String("l", "localhost:1080", "local SOCKS listen address")
insecureFlag := flag.Bool("invalidCerts", false, "accept all invalid certs (insecure)")
flag.Usage = func() {
fmt.Printf("Usage: %s [OPTIONS] QUICTUN_URL\n", os.Args[0])
flag.PrintDefaults()
}
flag.Parse()
args := flag.Args()
if len(args) != 1 {
flag.Usage()
return
}
tunnelAddr := args[0]
// configure and run quictun client
client := quictun.Client{
ListenAddr: *listenFlag,
TunnelAddr: tunnelAddr,
UserAgent: userAgent,
DialTimeout: dialTimeout * time.Second,
TlsCfg: &tls.Config{InsecureSkipVerify: *insecureFlag},
}
log.Fatal(client.Run())
}
<|start_filename|>internal/lru/lru.go<|end_filename|>
package lru
import (
"sync"
)
type entry struct {
key uint64
value uint32
next *entry
}
// LRU is an LRU cache.
// Concurrent access is synchronized.
//
// A map is used as the index.
// The LRU order is tracked in a linked list.
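//
// Usage sketch (illustrative values):
//
//	cache := New(16)
//	old := cache.Set(42, 7) // old == 0 for a previously unseen key
//	v := cache.Get(42)      // v == 7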
type LRU struct {
capacity int // max number of entries
head *entry // first entry in LRU order
cache map[uint64]*entry // mapping of all key-value pairs
lock sync.Mutex // guards the whole struct
}
// New creates a new LRU cache with the given capacity
func New(capacity int) *LRU {
if capacity < 2 {
panic("capacity must be at least 2")
}
return &LRU{
capacity: capacity,
cache: make(map[uint64]*entry, capacity),
}
}
// Set sets the value for the given key. If an entry for the given key already
// exists, it is overwritten.
func (l *LRU) Set(key uint64, value uint32) (old uint32) {
l.lock.Lock()
if ep, ok := l.cache[key]; ok {
old = ep.value
ep.value = value
l.moveToFront(ep)
l.lock.Unlock()
return
}
// insert new entry for key
ep := new(entry)
ep.key = key
ep.value = value
ep.next = l.head
l.head = ep
l.cache[key] = ep
if len(l.cache) > l.capacity {
l.removeLast()
}
l.lock.Unlock()
return
}
// Get returns the current value for the given key.
// If no value for the given key exists, 0 is returned.
func (l *LRU) Get(key uint64) (value uint32) {
l.lock.Lock()
if ep, ok := l.cache[key]; ok {
value = ep.value
l.moveToFront(ep)
}
l.lock.Unlock()
return value
}
func (l *LRU) moveToFront(ep *entry) {
// move entry to front
if l.head != ep {
after := ep.next
cur := l.head
ep.next = cur
for cur.next != ep {
cur = cur.next
}
cur.next = after
l.head = ep
}
}
func (l *LRU) removeLast() {
// remove last entry from list
prev := l.head
last := prev.next
for last.next != nil {
prev = last
last = last.next
}
prev.next = nil
// remove from cache
delete(l.cache, last.key)
}
<|start_filename|>server.go<|end_filename|>
package quictun
import (
"bufio"
"fmt"
"net"
"strconv"
"time"
"github.com/julienschmidt/quictun/internal/socks"
quic "github.com/lucas-clemente/quic-go"
)
// SequenceCache is a cache for client sequence numbers.
// Implementations should limit the number of cached key-value pairs using a
// strategy like least recently used (LRU).
type SequenceCache interface {
Set(key uint64, value uint32) (old uint32)
Get(key uint64) (value uint32)
}
// Server is a quictun server which handles QUIC sessions upgraded to the
// quictun protocol.
type Server struct {
DialTimeout time.Duration
SequenceCache SequenceCache
}
// CheckSequenceNumber checks and caches the sequence number sent by a client
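// The header value is expected to be 24 hex digits: a 16-digit client ID
// followed by an 8-digit sequence number, e.g. "0123456789ABCDEF00000001".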
func (s *Server) CheckSequenceNumber(header string) bool {
// parse clientID and sequenceNumber from header value
if len(header) != 24 {
return false
}
clientID, err := strconv.ParseUint(header[:16], 16, 64)
if err != nil {
return false
}
sequenceNumber, err := strconv.ParseUint(header[16:], 16, 32)
if err != nil {
return false
}
// the new sequence number must be larger than any previously seen number
return s.SequenceCache.Set(clientID, uint32(sequenceNumber)) < uint32(sequenceNumber)
}
// Upgrade starts using a given QUIC session with the quictun protocol.
// The quictun server immediately starts accepting new QUIC streams and assumes
// them to speak the quictun protocol (QTP).
// The actual protocol upgrade (via an HTTP/2 request-response) is handled
// entirely by the web server.
func (s *Server) Upgrade(session quic.Session) {
for {
fmt.Println("Waiting for stream...")
stream, err := session.AcceptStream()
if err != nil {
fmt.Println("accept stream:", err)
session.Close(err)
return
}
go s.handleQuictunStream(stream)
}
}
func (s *Server) handleQuictunStream(stream quic.Stream) {
streamID := stream.StreamID()
fmt.Println("got stream", streamID)
streamRd := bufio.NewReader(stream)
req, err := socks.PeekRequest(streamRd)
if err != nil {
stream.Reset(err)
stream.Close()
fmt.Println("stream", streamID, ":", err)
return
}
switch req.Cmd() {
case socks.CmdConnect:
remote, err := net.DialTimeout("tcp", req.Dest().String(), s.DialTimeout)
if err != nil {
fmt.Printf("stream %d: %#v\n", streamID, err)
stream.Reset(nil)
stream.Close()
return
}
// remove request header from buffer
if _, err = streamRd.Discard(len(req)); err != nil {
stream.Reset(nil)
stream.Close()
remote.Close()
fmt.Println("stream", streamID, ":", err)
return
}
fmt.Println("Start proxying...")
go proxy(stream, remote) // recv from remote and send to stream
proxy(remote, streamRd) // recv from stream and send to remote
default:
socks.SendReply(stream, socks.StatusCmdNotSupported, nil)
stream.Reset(nil)
stream.Close()
return
}
}
| julienschmidt/quictun |
<|start_filename|>package.json<|end_filename|>
{
"name": "cleverbot-node",
"keywords": [
"cleverbot"
],
"description": "Cleverbot client for node.js",
"version": "0.3.10",
"license": "MIT",
"author": "<NAME> <<EMAIL>>",
"url": "https://github.com/fojas/cleverbot-node/",
"directories": {
"lib": "./lib",
"example": "./examples"
},
"repository": {
"type": "git",
"url": "https://github.com/fojas/cleverbot-node.git"
},
"main": "./lib/cleverbot.js",
"dependencies": {}
}
| fojas/cleverbot-node |
<|start_filename|>demo/index.js<|end_filename|>
import React from 'react';
import { render } from 'react-dom';
import TwentyTwenty from '../src';
const Uncontrolled = () => (
<TwentyTwenty
verticalAlign="bottom"
minDistanceToBeginInteraction={0}
maxAngleToBeginInteraction={Infinity}
left={<img alt="cat-1" src="http://exmoorpet.com/wp-content/uploads/2012/08/cat.png" width="200" />}
right={<img alt="cat-2" src="http://www.cats.org.uk/uploads/images/pages/photo_latest14.jpg" width="300" />}
slider={<div className="slider" />}
/>
);
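// Controlled variant: the slider position is held in component state and
// passed to TwentyTwenty via the position/onChange props.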
class Controlled extends React.Component {
constructor() {
super();
this.state = { position: 0.5 };
this.setPosition = position => this.setState({ position });
}
render() {
const { position } = this.state;
return (
<React.Fragment>
<input
type="range"
min="0"
max="1"
step="0.01"
value={position}
onChange={e => this.setPosition(e.target.valueAsNumber)}
/>
<TwentyTwenty
verticalAlign="bottom"
minDistanceToBeginInteraction={0}
maxAngleToBeginInteraction={Infinity}
position={position}
onChange={this.setPosition}
left={<img alt="cat-1" src="http://exmoorpet.com/wp-content/uploads/2012/08/cat.png" width="200" />}
right={<img alt="cat-2" src="http://www.cats.org.uk/uploads/images/pages/photo_latest14.jpg" width="300" />}
slider={<div className="slider" />}
/>
</React.Fragment>
);
}
}
class App extends React.Component {
constructor() {
super();
this.state = { controlled: false };
this.setControlled = controlled => this.setState({ controlled });
}
render() {
return (
<React.Fragment>
{this.state.controlled ? <Controlled /> : <Uncontrolled />}
<label>
<input
type="checkbox"
onChange={e => this.setControlled(e.target.checked)}
/>
{" "}Use controlled component
</label>
</React.Fragment>
);
}
}
render(
<App />,
document.getElementById('main')
);
| jordiviva/react-twentytwenty |
<|start_filename|>Hotspot.cmd<|end_filename|>
@echo off
:start
cls
echo Choose an action below by entering its number.
echo [1] Setup or Modify
echo [2] Activate Hotspot
echo [3] Deactivate Hotspot
echo [4] List connected devices
set /p choice=
if %choice%==1 goto setup
if %choice%==2 goto activate
if %choice%==3 goto deactivate
if %choice%==4 goto manage
echo Unknown action, please just enter the number.
pause
goto start
:setup
netsh wlan stop hostednetwork >NUL
echo Enter the SSID of the hotspot
set /p set_ssid=
echo Enter the password for accessing the hotspot (8 characters minimum)
set /p set_pass=
echo Create a [t]emporary or [p]ermanent hotspot?
echo Temporary hotspots vanish after a restart
echo Enter the marked letter of your preferred option
set /p set_permanent=
netsh wlan set hostednetwork mode=allow ssid="%set_ssid%" key="%set_pass%"
netsh wlan start hostednetwork
netsh wlan show hostednetwork setting=security
if "%set_permanent%"=="p" (
echo netsh wlan start hostednetwork > "%AppData%\Microsoft\Windows\Start Menu\Programs\Startup\permanent_hotspot.cmd"
echo Hotspot set to permanent mode
)
if "%set_permanent%"=="t" (
del /f /q "%AppData%\Microsoft\Windows\Start Menu\Programs\Startup\permanent_hotspot.cmd" >NUL 2>&1
echo Hotspot set to temporary mode
)
echo ------------------
echo If this is your first setup, you need to open the adapter settings in the Network and Sharing Center
echo Select your working internet connection, right-click it and choose Properties
echo Switch to the Sharing tab, tick the option to share the connection and select your created hotspot
echo ------------------
pause
goto start
:activate
netsh wlan start hostednetwork
pause
goto start
:deactivate
netsh wlan stop hostednetwork
pause
goto start
:manage
@echo off
set hasClients=0
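rem List ARP entries in the 192.168.x.2-254 range (devices connected to the hotspot)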
arp -a | findstr /r "192\.168\.[0-9]*\.[2-9][^0-9] 192\.168\.[0-9]*\.[0-9][0-9][^0-9] 192\.168\.[0-9]*\.[0-1][0-9][0-9]" >test.tmp
arp -a | findstr /r "192\.168\.[0-9]*\.2[0-46-9][0-9] 192\.168\.[0-9]*\.25[0-4]" >>test.tmp
for /F "tokens=1,2,3" %%i in (test.tmp) do call :process %%i %%j %%k
del test.tmp
echo Connected Clients
echo ------------------
if %hasClients%==0 echo No device is currently connected to your hotspot
if %hasClients%==1 (
type result.tmp
del result.tmp
)
echo ------------------
pause
goto start
:process
set VAR1=%1
ping -a %VAR1% -n 1 | findstr Pinging > loop1.tmp
for /F "tokens=1,2,3" %%i in (loop1.tmp) do call :process2 %%i %%j %%k
del loop1.tmp
goto :EOF
:process2
SET VAR2=%2
SET VAR3=%3
set hasClients=1
echo %VAR2% %VAR3% >>result.tmp
goto :EOF
:EOF
| fczero/Windows-Hotspot |
<|start_filename|>src/ppu/win_without_bg.asm<|end_filename|>
; Copyright (C) 2019 <NAME> (<EMAIL>)
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
; Tests enabling LCDC bit 6 but not bit 0
INCLUDE "inc/hardware.inc"
SECTION "vblank", ROM0[$40]
reti
SECTION "stat", ROM0[$48]
jr hstat_handler
SECTION "boot", ROM0[$100]
di
jp main
ds $150 - $104
SECTION "main", ROM0
main::
ld sp, $e000
call reset_registers
call reset_oam
call copy_font
; write tile using color 2 to tile $80
ld sp, $8810
ld hl, $ff00
REPT 8
push hl
ENDR
ld sp, $e000
call reset_vram
ld a, $80
call fill_vram_9c00
; copy hram code to hram
ld hl, hram_code
ld bc, (hram_code_end - hram_code) << 8 | low(hhram_code)
.copy_hram
ld a, [hli]
ldh [c], a
inc c
dec b
jr nz, .copy_hram
; select mode 0 and LYC interrupts.
; due to STAT IRQ blocking, a mode 2 interrupt cannot trigger if mode 0
; is also selected; LYC, however, can, so we use LYC as a stand-in for
; mode 2 and update rLYC accordingly.
; (the STAT register could also be rewritten later to select different
; interrupt sources.)
ld a, STATF_MODE00 | STATF_LYC
ldh [rSTAT], a
; set window to partially cover screen
ld a, $30
ldh [rWX], a
ldh [rWY], a
ld a, %11000100
ldh [rBGP], a
ldh [rOBP0], a
ld a, LCDCF_ON | LCDCF_WINON | LCDCF_WIN9C00 | LCDCF_BG8000 | LCDCF_BG9800 | LCDCF_OBJON | LCDCF_OBJ8 | LCDCF_BGOFF
ldh [rLCDC], a
; clear pending interrupts
xor a
ldh [rIE], a
; enable vblank interrupt
ld a, IEF_VBLANK
ldh [rIE], a
ei
; first frame is weird, wait until it ends
; only vblank can trigger this
halt
; disable interrupts, as we don't want them except on certain scanlines
di
; the next section of the test only relies on the STAT interrupt
ld a, IEF_LCDC
ldh [rIE], a
ld a, $10
call test_hblank_len
ldh a, [hcounter]
ldh [hfirstcounter], a
ld a, $80
call test_hblank_len
ld hl, sprites
ld de, woam
ld c, $A0
.copy_oam
ld a, [hli]
ld [de], a
inc e
dec c
jr nz, .copy_oam
ld c, low(hfirstcounter)
ld hl, woam + 2
call print_value
inc c ; hcounter
call print_value
call hoamdma
; source code breakpoint - good time to take a screenshot to compare
ld b,b
.done
jr .done
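; Prints the byte at HRAM address $FF00+c as two hexadecimal digits by
; writing the digit tile numbers into the tile fields of consecutive
; OAM entries starting at hl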
print_value::
ldh a, [c]
and $f0
swap a
call .print
ldh a, [c]
and $0f
.print
cp $A
jr c, .digit
add a, "A" - "0" - $A
.digit
add a, "0"
ld [hli], a
inc l
inc l
inc l
ret
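; Busy-waits until LY reaches the given scanline, arms LYC for the next
; line, then halts until the STAT interrupt handlers in HRAM fire
;
; @param a scanline to wait for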
test_hblank_len::
; wait until given scanline
ld hl, rLY
.wait
cp [hl]
jr nz, .wait
inc a
ldh [rLYC], a
; we must be in mode 2 or 3, so the next interrupt that will occur should be Mode 0
; clear pending interrupts
xor a
ei ; enable interrupts, doesn't take effect until *after the next instruction*
ldh [rIF], a
halt ; wait for the Mode 0 interrupt (during which Mode 2 will occur)
ret
SECTION "stat handlers", ROM0[$68]
mode2_handler::
ldh [hcounter], a
ld a, low(($10000 - hstat_handler.jump) + mode0_handler)
ldh [hstat_handler + 1], a
ret ; return with ints disabled
mode0_handler::
ei ; allow mode 2 int to trigger
ld a, low(($10000 - hstat_handler.jump) + mode2_handler)
ldh [hstat_handler + 1], a
xor a
REPT 100
inc a
ENDR
ret
SECTION "hram code", ROM0
hram_code::
oamdma::
ld a, high(woam)
ldh [rDMA], a
ld a, 40
.wait
dec a
jr nz, .wait
ret
.end
stat_handler::
db $18 ; jr
db low(($10000 - hstat_handler.jump) + mode0_handler)
hram_code_end::
SECTION "hram", HRAM[$fff0]
hhram_code::
hoamdma::
ds oamdma.end - oamdma
hstat_handler::
ds 2
.jump
hfirstcounter::
db
hcounter::
db
SECTION "oam", ROM0
dspr: MACRO
db \1 + 16, \2 + 8, \3, \4
ENDM
sprites::
dspr 60, 76, "?", 0
dspr 60, 84, "?", 0
dspr 68, 76, "?", 0
dspr 68, 84, "?", 0
dspr 50, 50, "N", OAMF_PRI
dspr 50, 58, "O", OAMF_PRI
dspr 50, 68, "W", 0
dspr 50, 76, "I", 0
dspr 50, 84, "N", 0
dspr 60, 50, "1", 0
dspr 60, 58, "0", 0
dspr 60, 68, "$", 0
dspr 68, 50, "8", 0
dspr 68, 58, "0", 0
dspr 68, 68, "$", 0
.end
REPT $a0 - (.end - sprites)
db $ff
ENDR
SECTION "wram", WRAM0,ALIGN[8]
woam::
ds $a0
INCLUDE "inc/utils.asm"
<|start_filename|>src/dma/hdma_timing-C.asm<|end_filename|>
; Copyright (C) 2020 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
; Tests HDMA timing in single speed and double speed mode to see when HDMA
; should start/finish relative to STAT mode. Also tests HDMA duration
; using DIV.
; Verified results:
; pass: CGB, AGB 0/A/B/BE
; fail: DMG, MGB, SGB, SGB2
; untested: AGB AE
TITLE equs "hdma_timing-C"
REQUIRES_CGB = 1
INCLUDE "inc/base.asm"
SECTION "correct-results", ROMX
CorrectResults::
; testing with SCX = 1
DB $83, $80, $80, $82
DB $00, $ff, $ff, $ff
; testing with SCX = 2. HDMA is delayed due to longer mode 3.
DB $83, $80, $80, $82
DB $00, $00, $ff, $ff
; test the duration of HDMA using DIV
DB $01, $02, $01, $02
DB $03, $04, $03, $04
; testing with SCX = 1 in 2x speed mode
DB $83, $80, $80, $82
DB $00, $ff, $ff, $ff
; testing with SCX = 2 in 2x speed mode
DB $83, $80, $80, $82
DB $00, $00, $ff, $ff
; test the duration of HDMA using DIV in 2x speed mode
DB $03, $04, $03, $04
DB $07, $08, $07, $08
; @param \1 number of nops to delay before initialising HDMA transfer
; @param \2 number of nops to delay before reading of register \3
; @param \3 register to read after delaying
sub_test: MACRO
lcd_on
ld a, $c0
ldh [rHDMA1], a
ld a, $00
ldh [rHDMA2], a
ld a, $98
ldh [rHDMA3], a
ld a, $00
ldh [rHDMA4], a
nops \1
ld a, $80 ; copy 16 bytes using H-blank DMA
ldh [rHDMA5], a
nops \2
ldh a, [low(\3)]
store_result
call LcdOffSafe
ENDM
sub_test_group: MACRO
sub_test 104, 44, rSTAT
sub_test 104, 45, rSTAT
sub_test 104, 86, rSTAT
sub_test 104, 87, rSTAT
sub_test 104, 46, rHDMA5
sub_test 104, 47, rHDMA5
sub_test 104, 48, rHDMA5
sub_test 104, 49, rHDMA5
ENDM
sub_test_group2x: MACRO
sub_test 218, 107, rSTAT
sub_test 218, 108, rSTAT
sub_test 218, 191, rSTAT
sub_test 218, 192, rSTAT
sub_test 218, 109, rHDMA5
sub_test 218, 110, rHDMA5
sub_test 218, 111, rHDMA5
sub_test 218, 112, rHDMA5
ENDM
; @param \1 number of nops to delay before resetting DIV
; @param \2 number of nops to delay before initialising HDMA transfer
; @param \3 number of nops to delay before reading of register \4
; @param \4 register to read after delay \3
; @param \5 length in bytes
sub_test2: MACRO
lcd_on
nops \1
xor a
ldh [rDIV], a
ld a, $c0
ldh [rHDMA1], a
ld a, $00
ldh [rHDMA2], a
ld a, $98
ldh [rHDMA3], a
ld a, $00
ldh [rHDMA4], a
nops \2
ld a, $80 | ((\5 / 16) - 1)
ldh [rHDMA5], a
nops \3
ldh a, [low(\4)]
store_result
call LcdOffSafe
ENDM
SECTION "run-test", ROM0
RunTest::
ld a, 1
ldh [rSCX], a
sub_test_group
ld a, 2
ldh [rSCX], a
sub_test_group
ld a, 1
ldh [rSCX], a
sub_test2 70, 30, 60, rDIV, 16
sub_test2 70, 30, 61, rDIV, 16
ld a, 2
ldh [rSCX], a
sub_test2 70, 30, 60, rDIV, 16
sub_test2 70, 30, 61, rDIV, 16
ld a, 1
ldh [rSCX], a
sub_test2 70, 30, 179, rDIV, 32
sub_test2 70, 30, 180, rDIV, 32
ld a, 2
ldh [rSCX], a
sub_test2 70, 30, 179, rDIV, 32
sub_test2 70, 30, 180, rDIV, 32
call SwitchSpeed
ld a, 1
ldh [rSCX], a
sub_test_group2x
ld a, 2
ldh [rSCX], a
sub_test_group2x
ld a, 1
ldh [rSCX], a
sub_test2 140, 65, 145, rDIV, 16
sub_test2 140, 65, 146, rDIV, 16
ld a, 2
ldh [rSCX], a
sub_test2 140, 65, 145, rDIV, 16
sub_test2 140, 65, 146, rDIV, 16
ld a, 1
ldh [rSCX], a
sub_test2 140, 65, 384, rDIV, 32
sub_test2 140, 65, 385, rDIV, 32
ld a, 2
ldh [rSCX], a
sub_test2 140, 65, 384, rDIV, 32
sub_test2 140, 65, 385, rDIV, 32
ret
<|start_filename|>src/ppu/m3_scy_change2.asm<|end_filename|>
; Copyright (C) 2019 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
; Toggles the value of the SCY register during mode 3.
; Sprites are positioned to cause the write to occur on different T-cycles of
; the background tile fetch, showing when the change to the bit takes effect.
; Initiated by LCD STAT mode 2 interrupt in a field of NOPs.
init_lcdc_stat_int_test: MACRO
call init_bg_maps_alphabetical_9800
ld a, LCDCF_ON | LCDCF_WIN9C00 | LCDCF_WINOFF | LCDCF_BG8000 | LCDCF_BG9800 | LCDCF_OBJ8 | LCDCF_OBJON | LCDCF_BGON
ldh [rLCDC], a
ld d, 0
ld e, 4
ld hl, rSCY
ld [hl], d
ENDM
lcdc_stat_int: MACRO
nops 9
ld [hl], e
ld [hl], d
ENDM
INCLUDE "inc/lcdc_stat_int_base.asm"
<|start_filename|>Makefile<|end_filename|>
SRCDIR = src
OBJDIR = .o
DEPDIR = .d
BINDIR = build
SOURCES := $(wildcard $(SRCDIR)/*.asm) $(wildcard $(SRCDIR)/*/*.asm)
OBJECTS := $(SOURCES:$(SRCDIR)/%.asm=$(OBJDIR)/%.o)
DEPS := $(SOURCES:$(SRCDIR)/%.asm=$(DEPDIR)/%.d)
ROMS := $(SOURCES:$(SRCDIR)/%.asm=$(BINDIR)/%.gb)
all: $(ROMS)
$(ROMS): $(BINDIR)/%.gb : $(OBJDIR)/%.o
@mkdir -p $(@D)
rgblink -n $(basename $@).sym -m $(basename $@).map -o $@ $<
rgbfix -v -p 255 $@
$(OBJECTS): $(OBJDIR)/%.o : $(SRCDIR)/%.asm
@mkdir -p $(@D)
@mkdir -p $(@D:$(OBJDIR)/%=$(DEPDIR)/%)
rgbasm -i mgblib/ -M $(DEPDIR)/$*.d -o $@ $<
$(DEPS):
include $(wildcard $(DEPS))
zip:
rm -f mealybug-tearoom-tests.zip
cd build && zip -r ../mealybug-tearoom-tests.zip . -i *.gb *.sym
.PHONY: clean
clean:
rm -rf $(OBJDIR)
rm -rf $(DEPDIR)
rm -rf $(BINDIR)
<|start_filename|>inc/old_skool_outline_thick.asm<|end_filename|>
; ///////////////////////
; // //
; // File Attributes //
; // //
; ///////////////////////
; Filename: old-skool-outline-thick.png
; Pixel Width: 256px
; Pixel Height: 32px
; /////////////////
; // //
; // Tile Data //
; // //
; /////////////////
oldskooloutlinethick_tile_data:
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $3C,$3C,$7E,$66,$FF,$C3,$FF,$81,$FF,$E7,$3C,$24,$3C,$24,$3C,$3C
DB $3C,$3C,$3C,$24,$3C,$24,$FF,$E7,$FF,$81,$FF,$C3,$7E,$66,$3C,$3C
DB $1C,$1C,$1E,$16,$FF,$F3,$FF,$81,$FF,$81,$FF,$F3,$1E,$16,$1C,$1C
DB $38,$38,$78,$68,$FF,$CF,$FF,$81,$FF,$81,$FF,$CF,$78,$68,$38,$38
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$6C,$6C,$8A,$8A,$AC,$AC,$AA,$AA,$6C,$6C,$00,$00
DB $00,$00,$00,$00,$60,$60,$80,$80,$80,$80,$80,$80,$60,$60,$00,$00
DB $00,$00,$00,$00,$4A,$4A,$AA,$AA,$AE,$AE,$AE,$AE,$4A,$4A,$00,$00
DB $00,$00,$00,$00,$8A,$8A,$8A,$8A,$8E,$8E,$82,$82,$E2,$E2,$00,$00
DB $00,$00,$00,$00,$35,$35,$45,$45,$25,$25,$15,$15,$67,$67,$00,$00
DB $00,$00,$00,$00,$66,$66,$55,$55,$66,$66,$44,$44,$44,$44,$00,$00
DB $00,$00,$00,$00,$26,$26,$55,$55,$56,$56,$55,$55,$25,$25,$00,$00
DB $00,$00,$00,$00,$73,$73,$24,$24,$22,$22,$21,$21,$26,$26,$00,$00
DB $00,$00,$00,$00,$03,$03,$04,$04,$02,$02,$01,$01,$06,$06,$00,$00
DB $00,$00,$00,$00,$25,$25,$55,$55,$75,$75,$55,$55,$52,$52,$00,$00
DB $00,$00,$00,$00,$70,$70,$40,$40,$60,$60,$40,$40,$70,$70,$00,$00
DB $00,$00,$00,$00,$37,$37,$42,$42,$22,$22,$12,$12,$62,$62,$00,$00
DB $00,$00,$00,$00,$27,$27,$52,$52,$72,$72,$52,$52,$52,$52,$00,$00
DB $00,$00,$00,$00,$73,$73,$44,$44,$62,$62,$41,$41,$76,$76,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
DB $3C,$3C,$3C,$24,$3C,$24,$3C,$24,$3C,$3C,$3C,$24,$3C,$3C,$00,$00
DB $FE,$FE,$FE,$92,$FE,$92,$FE,$92,$FE,$FE,$00,$00,$00,$00,$00,$00
DB $7E,$7E,$FF,$CB,$FF,$81,$FF,$CB,$FF,$81,$FF,$CB,$7E,$7E,$00,$00
DB $FF,$E7,$FF,$81,$FF,$A7,$FF,$81,$FF,$E5,$FF,$81,$FF,$E7,$00,$00
DB $FF,$FF,$FF,$99,$FF,$93,$FF,$E7,$FF,$C9,$FF,$99,$FF,$FF,$00,$00
DB $7C,$7C,$FE,$C6,$FF,$93,$FF,$C5,$FF,$93,$FF,$C9,$7F,$7F,$00,$00
DB $3C,$3C,$3C,$24,$3C,$24,$3C,$24,$3C,$3C,$00,$00,$00,$00,$00,$00
DB $3C,$3C,$7C,$64,$7C,$4C,$78,$48,$7C,$4C,$7C,$64,$3C,$3C,$00,$00
DB $78,$78,$7C,$4C,$7C,$64,$3C,$24,$7C,$64,$7C,$4C,$78,$78,$00,$00
DB $FE,$FE,$FE,$AA,$FE,$C6,$FE,$82,$FE,$C6,$FE,$AA,$FE,$FE,$00,$00
DB $3C,$3C,$3C,$24,$FF,$E7,$FF,$81,$FF,$E7,$3C,$24,$3C,$3C,$00,$00
DB $00,$00,$00,$00,$00,$00,$3C,$3C,$7C,$64,$7C,$4C,$78,$78,$00,$00
DB $00,$00,$00,$00,$7E,$7E,$7E,$42,$7E,$7E,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$78,$78,$78,$48,$78,$48,$78,$78,$00,$00
DB $1E,$1E,$3E,$32,$7E,$62,$FE,$C6,$FC,$8C,$F8,$98,$F0,$F0,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$91,$FF,$89,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $7C,$7C,$7C,$44,$7C,$64,$3C,$24,$3C,$24,$3C,$24,$3C,$3C,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$91,$FF,$F3,$FF,$E7,$FF,$81,$FF,$FF,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$F1,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $FE,$FE,$FE,$92,$FE,$92,$FE,$82,$FE,$F2,$1E,$12,$1E,$1E,$00,$00
DB $FE,$FE,$FE,$82,$FE,$9E,$FF,$83,$FF,$F9,$FF,$83,$FE,$FE,$00,$00
DB $7E,$7E,$FE,$C2,$FE,$9E,$FF,$83,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $FF,$FF,$FF,$81,$FF,$F9,$0F,$09,$0F,$09,$0F,$09,$0F,$0F,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$C3,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $7F,$7F,$FF,$C1,$FF,$99,$FF,$C1,$7F,$79,$0F,$09,$0F,$0F,$00,$00
DB $3C,$3C,$3C,$24,$3C,$24,$3C,$3C,$3C,$24,$3C,$24,$3C,$3C,$00,$00
DB $3C,$3C,$3C,$24,$3C,$24,$3C,$3C,$7C,$64,$7C,$4C,$78,$78,$00,$00
DB $1F,$1F,$7F,$71,$FF,$C7,$FC,$9C,$FF,$C7,$7F,$71,$1F,$1F,$00,$00
DB $00,$00,$FF,$FF,$FF,$81,$FF,$FF,$FF,$81,$FF,$FF,$00,$00,$00,$00
DB $F8,$F8,$FE,$8E,$FF,$E3,$3F,$39,$FF,$E3,$FE,$8E,$F8,$F8,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$F9,$7F,$63,$3E,$3E,$3C,$24,$3C,$3C
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$93,$FF,$9F,$FF,$C1,$7F,$7F,$00,$00
DB $3C,$3C,$7E,$66,$7E,$42,$FF,$DB,$FF,$81,$FF,$99,$FF,$FF,$00,$00
DB $FE,$FE,$FF,$83,$FF,$99,$FF,$83,$FF,$99,$FF,$83,$FE,$FE,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$9F,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $FE,$FE,$FF,$83,$FF,$99,$FF,$99,$FF,$99,$FF,$83,$FE,$FE,$00,$00
DB $FF,$FF,$FF,$81,$FF,$9F,$FE,$82,$FF,$9F,$FF,$81,$FF,$FF,$00,$00
DB $FF,$FF,$FF,$81,$FF,$9F,$FE,$82,$FE,$9E,$F0,$90,$F0,$F0,$00,$00
DB $7E,$7E,$FE,$C2,$FF,$9F,$FF,$91,$FF,$99,$FF,$C1,$7F,$7F,$00,$00
DB $FF,$FF,$FF,$99,$FF,$99,$FF,$81,$FF,$99,$FF,$99,$FF,$FF,$00,$00
DB $7E,$7E,$7E,$42,$7E,$66,$3C,$24,$7E,$66,$7E,$42,$7E,$7E,$00,$00
DB $FF,$FF,$FF,$81,$FF,$E7,$3C,$24,$FC,$E4,$FC,$84,$FC,$FC,$00,$00
DB $FF,$FF,$FF,$99,$FF,$93,$FE,$86,$FF,$93,$FF,$99,$FF,$FF,$00,$00
DB $F0,$F0,$F0,$90,$F0,$90,$F0,$90,$FF,$9F,$FF,$81,$FF,$FF,$00,$00
DB $F7,$F7,$FF,$9D,$FF,$89,$FF,$95,$FF,$95,$FF,$95,$FF,$FF,$00,$00
DB $FF,$FF,$FF,$99,$FF,$89,$FF,$81,$FF,$91,$FF,$99,$FF,$FF,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$99,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $FE,$FE,$FF,$83,$FF,$99,$FF,$83,$FE,$9E,$F0,$90,$F0,$F0,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$99,$FF,$91,$FF,$C3,$7E,$7E,$00,$00
DB $FE,$FE,$FF,$83,$FF,$99,$FF,$83,$FF,$97,$FF,$99,$FF,$FF,$00,$00
DB $7F,$7F,$FF,$C1,$FF,$9F,$FF,$C3,$FF,$F9,$FF,$83,$FE,$FE,$00,$00
DB $FF,$FF,$FF,$81,$FF,$E7,$3C,$24,$3C,$24,$3C,$24,$3C,$3C,$00,$00
DB $FF,$FF,$FF,$99,$FF,$99,$FF,$99,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $FF,$FF,$FF,$99,$FF,$99,$FF,$C3,$7E,$42,$7E,$66,$3C,$3C,$00,$00
DB $FF,$FF,$FF,$9D,$FF,$9D,$FF,$95,$FF,$95,$FF,$C3,$7E,$7E,$00,$00
DB $FF,$FF,$FF,$99,$FF,$C3,$7E,$66,$FF,$C3,$FF,$99,$EF,$EF,$00,$00
DB $FF,$FF,$FF,$99,$FF,$99,$FF,$C3,$7E,$66,$3C,$24,$3C,$3C,$00,$00
DB $FF,$FF,$FF,$81,$FF,$F3,$7E,$66,$FF,$CF,$FF,$81,$FF,$FF,$00,$00
DB $7E,$7E,$7E,$42,$7E,$72,$1E,$12,$7E,$72,$7E,$42,$7E,$7E,$00,$00
DB $F0,$F0,$F8,$98,$FC,$8C,$FE,$C6,$7E,$62,$3E,$32,$1E,$1E,$00,$00
DB $7E,$7E,$7E,$42,$7E,$4E,$78,$48,$7E,$4E,$7E,$42,$7E,$7E,$00,$00
DB $3C,$3C,$7E,$66,$FF,$C3,$FF,$99,$FF,$FF,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$FF,$FF,$FF,$81,$FF,$FF,$00,$00
DB $78,$78,$7C,$4C,$7C,$44,$7C,$64,$3C,$3C,$00,$00,$00,$00,$00,$00
DB $3C,$3C,$7E,$66,$7E,$42,$FF,$DB,$FF,$81,$FF,$99,$FF,$FF,$00,$00
DB $FE,$FE,$FF,$83,$FF,$99,$FF,$83,$FF,$99,$FF,$83,$FE,$FE,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$9F,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $FE,$FE,$FF,$83,$FF,$99,$FF,$99,$FF,$99,$FF,$83,$FE,$FE,$00,$00
DB $FF,$FF,$FF,$81,$FF,$9F,$FE,$82,$FF,$9F,$FF,$81,$FF,$FF,$00,$00
DB $FF,$FF,$FF,$81,$FF,$9F,$FE,$82,$FE,$9E,$F0,$90,$F0,$F0,$00,$00
DB $7E,$7E,$FE,$C2,$FF,$9F,$FF,$91,$FF,$99,$FF,$C1,$7F,$7F,$00,$00
DB $FF,$FF,$FF,$99,$FF,$99,$FF,$81,$FF,$99,$FF,$99,$FF,$FF,$00,$00
DB $7E,$7E,$7E,$42,$7E,$66,$3C,$24,$7E,$66,$7E,$42,$7E,$7E,$00,$00
DB $FF,$FF,$FF,$81,$FF,$E7,$3C,$24,$FC,$E4,$FC,$84,$FC,$FC,$00,$00
DB $FF,$FF,$FF,$99,$FF,$93,$FE,$86,$FF,$93,$FF,$99,$FF,$FF,$00,$00
DB $F0,$F0,$F0,$90,$F0,$90,$F0,$90,$FF,$9F,$FF,$81,$FF,$FF,$00,$00
DB $F7,$F7,$FF,$9D,$FF,$89,$FF,$95,$FF,$95,$FF,$95,$FF,$FF,$00,$00
DB $FF,$FF,$FF,$99,$FF,$89,$FF,$81,$FF,$91,$FF,$99,$FF,$FF,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$99,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $FE,$FE,$FF,$83,$FF,$99,$FF,$83,$FE,$9E,$F0,$90,$F0,$F0,$00,$00
DB $7E,$7E,$FF,$C3,$FF,$99,$FF,$99,$FF,$91,$FF,$C3,$7E,$7E,$00,$00
DB $FE,$FE,$FF,$83,$FF,$99,$FF,$83,$FF,$97,$FF,$99,$FF,$FF,$00,$00
DB $7F,$7F,$FF,$C1,$FF,$9F,$FF,$C3,$FF,$F9,$FF,$83,$FE,$FE,$00,$00
DB $FF,$FF,$FF,$81,$FF,$E7,$3C,$24,$3C,$24,$3C,$24,$3C,$3C,$00,$00
DB $FF,$FF,$FF,$99,$FF,$99,$FF,$99,$FF,$99,$FF,$C3,$7E,$7E,$00,$00
DB $FF,$FF,$FF,$99,$FF,$99,$FF,$C3,$7E,$42,$7E,$66,$3C,$3C,$00,$00
DB $FF,$FF,$FF,$9D,$FF,$9D,$FF,$95,$FF,$95,$FF,$C3,$7E,$7E,$00,$00
DB $FF,$FF,$FF,$99,$FF,$C3,$7E,$66,$FF,$C3,$FF,$99,$EF,$EF,$00,$00
DB $FF,$FF,$FF,$99,$FF,$99,$FF,$C3,$7E,$66,$3C,$24,$3C,$3C,$00,$00
DB $FF,$FF,$FF,$81,$FF,$F3,$7E,$66,$FF,$CF,$FF,$81,$FF,$FF,$00,$00
DB $7C,$7C,$7E,$46,$7F,$73,$1F,$19,$7F,$73,$7E,$46,$7C,$7C,$00,$00
DB $3C,$3C,$3C,$24,$3C,$24,$3C,$24,$3C,$24,$3C,$24,$3C,$3C,$00,$00
DB $3E,$3E,$7E,$62,$FE,$CE,$F8,$98,$FE,$CE,$7E,$62,$3E,$3E,$00,$00
DB $7F,$7F,$FF,$C9,$FF,$93,$FE,$FE,$00,$00,$00,$00,$00,$00,$00,$00
DB $00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00,$00
<|start_filename|>inc/base.asm<|end_filename|>
; Copyright (C) 2020 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
SECTION "lib", ROMX
INCLUDE "mgblib/src/hardware.inc"
INCLUDE "mgblib/src/macros.asm"
IF DEF(REQUIRES_CGB) | ! DEF(USE_DMG_MODE)
enable_cgb_mode
ENDC
INCLUDE "mgblib/src/old_skool_outline_thick.asm"
INCLUDE "mgblib/src/display.asm"
INCLUDE "mgblib/src/print.asm"
INCLUDE "mgblib/src/misc/delay.asm"
INCLUDE "mgblib/src/serial/SerialSendByte.asm"
PUSHS
SECTION "results", WRAM0, ALIGN[8]
Results::
DS 254
ResultCounter::
DS 1
TestResult::
DS 1
POPS
; Writes byte in register `a` to [de] and increases
; the result counter
;
; @param a the value to store
; @param de the address to store the value
; @return de + 1
; @destroys af
store_result: MACRO
ld [de], a
inc de
ld a, [ResultCounter]
inc a
ld [ResultCounter], a
ENDM
SECTION "boot", ROM0[$100]
nop
jp Main
SECTION "header-remainder", ROM0[$14a]
ds $150-@, $0
SECTION "main", ROMX
Main::
di
ld sp, $fffe
push af
call ResetDisplay
call ResetCursor
pop af
IF DEF(REQUIRES_CGB)
cp $11
jp nz, NotCGB
ENDC
xor a
ld [ResultCounter], a
ld de, Results
call RunTest
call ResetDisplay
call LoadFont
call GeneratePaleHexDigits
ld de, TestTitle
call PrintString
ld a, [wPrintCursorAddress]
and $1f
jr z, .skip_extra_new_line
print_string_literal "\\n"
.skip_extra_new_line::
print_string_literal "\\n"
IF DEF(DISPLAY_RESULTS_ONLY)
call DisplayResults
ELSE
call CompareResults
jp Quit
ENDC
NotCGB::
call LoadFont
print_string_literal "CGB Required"
ld a, "F"
ld [TestResult], a
jp Quit
; Displays and compares Results to CorrectResults
;
; @param [Results] results recorded from test
; @param [CorrectResults] the correct results
; @param [ResultCounter] number of results to compare
CompareResults::
ld a, "P"
ld [TestResult], a
ld hl, Results
ld de, CorrectResults
ld a, [ResultCounter]
ld c, a
.loop
ld a, [de]
ld b, a ; b = correct result
ld a, [hl] ; a = result
push hl
push de
push bc
; print the result
push bc
push af
call PrintHexU8NoDollar
pop af
pop bc
cp b
jr z, .matched
; record the failure
ld a, "F"
ld [TestResult], a
; print the correct result and a space
ld a, b
call PrintPaleHexU8NoDollar
ld a, " "
call PrintCharacter
jr .continue
.matched:
print_string_literal " "
.continue:
pop bc
pop de
pop hl
inc hl
inc de
dec c
jr nz, .loop
inc a
; print a new line if not already on the first character of the line
call PrintNewLine
ld a, [wPrintCursorAddress]
and 31
jr z, .noExtraLine
call PrintNewLine
.noExtraLine::
ld a, [TestResult]
cp "P"
jr nz, .failed
print_string_literal "Passed"
ret
.failed
print_string_literal "Failed"
ret
; Display the results only without comparing
; to any values
DisplayResults::
ld hl, Results
ld a, [ResultCounter]
ld c, a
.loop
ld a, [hl]
push hl
push bc
call PrintHexU8NoDollar
print_string_literal " "
pop bc
pop hl
inc hl
dec c
jr nz, .loop
; turn lcd on and loop forever
lcd_on
wait_ly 145
wait_ly 144
ld b, b
.forever:
jr .forever
; Set magic register values, sends result via serial
; output, and loops forever
;
; @param [TestResult] if "P" then reports a passed result, otherwise failed
Quit::
lcd_on
wait_ly 145
ld a, [TestResult]
cp "P"
jr nz, .failed
ld b, 3
ld c, 5
ld d, 8
ld e, 13
ld h, 21
ld l, 34
jr .sendSerialResult
.failed
ld b, $42
ld c, b
ld d, b
ld e, b
ld h, b
ld l, b
.sendSerialResult:
ld a, b
call SerialSendByte
ld a, c
call SerialSendByte
ld a, d
call SerialSendByte
ld a, e
call SerialSendByte
ld a, h
call SerialSendByte
ld a, l
call SerialSendByte
wait_ly 144
xor a
ld b, b
.forever:
jr .forever
; Print an 8-bit value in hexadecimal with pale colours, 2 digits only
;
; @param a number to print
; @destroys af, bc, hl
PrintPaleHexU8NoDollar::
push af
swap a
call PrintPaleHexNibble
pop af
call PrintPaleHexNibble
ret
; Print a 4-bit value in hexadecimal with pale colours
;
; @param a number to print (low nibble)
; @destroys af, bc, hl
PrintPaleHexNibble::
and $0f
add 128 ; pale hex digits start at $9000
jp PrintCharacter
; Generates pale versions of the hex digits at $8800 based
; on the tile data from the ASCII font located at $9000
;
; @destroys af, c, de, hl
GeneratePaleHexDigits::
; generate numbers
ld hl, $9000 + ("0" * 16)
ld de, $8800
ld c, 10 * 8
.numbersLoop::
; read bitplane 1
inc hl
ld a, [hl-]
; write it as bitplane 0
ld [de], a
; zero out bitplane 1
inc de
xor a
ld [de], a
; advance to next row
inc de
inc hl
inc hl
dec c
jr nz, .numbersLoop
; generate letters
ld hl, $9000 + ("A" * 16)
ld de, $8800 + 10 * 16
ld c, 6 * 8
.lettersLoop::
; read bitplane 1
inc hl
ld a, [hl-]
; write it as bitplane 0
ld [de], a
; zero out bitplane 1
inc de
xor a
ld [de], a
; advance to next row
inc de
inc hl
inc hl
dec c
jr nz, .lettersLoop
ret
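; Switches between normal and double speed mode by requesting a speed
; switch via KEY1 and executing STOP with interrupts disabled and the
; joypad lines deselected (P1 = $30)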
SwitchSpeed:
xor a
ldh [rIE], a
ld a, $30
ldh [rP1], a
ld a, $01
ldh [rKEY1], a
stop
ret
PUSHS
SECTION "header_title", ROM0[$134]
IF STRLEN("{TITLE}") > 15
DB STRUPR(STRSUB("{TITLE}", 0, 15))
ELSE
DB STRUPR("{TITLE}")
ENDC
SECTION "title", ROM0
TestTitle::
DB "{TITLE}", $00
POPS
<|start_filename|>src/mbc/mbc3_rtc.asm<|end_filename|>
; Copyright (C) 2020 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
TITLE equs "mbc3_rtc"
INCLUDE "inc/base.asm"
SECTION "correct-results", ROMX
CorrectResults::
DB $01, $00
DB $01, $00
DB $00, $3B
DB $40, $00, $00, $00, $3B
DB $00, $00, $00, $01, $00
DB $00, $00, $01, $00, $00
DB $00, $01, $00, $00, $00
DB $01, $00, $00, $00, $00
DB $80, $00, $00, $00, $00
DB $01, $00, $00, $3B
DB $01, $00, $01, $00
DB $10, $20
DB $00, $00
DB $C1, $1F, $3F, $3F
SECTION "header-mbc-type", ROM0[$147]
MBCType::
DB $10, $00, $03
RTC_SECONDS EQU $08
RTC_MINUTES EQU $09
RTC_HOURS EQU $0A
RTC_DAYS_LOW EQU $0B
RTC_DAYS_HIGH EQU $0C
ONE_SECOND EQU 1048576
HALF_SECOND EQU 524288
TOLERANCE EQU 128
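; Latches the current time into the RTC registers by writing $00 and
; then $01 to $6000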
latch_rtc_data: MACRO
xor a
ld [$6000], a
inc a
ld [$6000], a
ENDM
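; Selects RTC register \1 via $4000 and writes value \2 to it via $A000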
set_rtc_register: MACRO
ld a, \1
ld [$4000], a
ld a, \2
ld [$a000], a
ENDM
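; Selects RTC register \1, reads its value from $A000 and stores it as a
; test result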
read_rtc_register_and_store_result: MACRO
ld a, \1
ld [$4000], a
ld a, [$a000]
store_result
ENDM
subtest_seconds: MACRO
set_rtc_register RTC_SECONDS, 0
delay \1
latch_rtc_data
read_rtc_register_and_store_result RTC_SECONDS
ENDM
subtest_minutes_write_does_not_reset_counter: MACRO
set_rtc_register RTC_SECONDS, 59
delay HALF_SECOND
set_rtc_register RTC_MINUTES, 0
delay \1
latch_rtc_data
read_rtc_register_and_store_result RTC_MINUTES
read_rtc_register_and_store_result RTC_SECONDS
ENDM
subtest_overflow: MACRO
set_rtc_register RTC_SECONDS, 0
set_rtc_register RTC_MINUTES, \5
set_rtc_register RTC_HOURS, \4
set_rtc_register RTC_DAYS_LOW, \3
set_rtc_register RTC_DAYS_HIGH, \2
set_rtc_register RTC_SECONDS, 59
delay \1
latch_rtc_data
read_rtc_register_and_store_result RTC_DAYS_HIGH
read_rtc_register_and_store_result RTC_DAYS_LOW
read_rtc_register_and_store_result RTC_HOURS
read_rtc_register_and_store_result RTC_MINUTES
read_rtc_register_and_store_result RTC_SECONDS
ENDM
subtest_latching_does_not_reset_counter: MACRO
set_rtc_register RTC_SECONDS, 59
set_rtc_register RTC_MINUTES, 0
delay HALF_SECOND
latch_rtc_data
delay \1
latch_rtc_data
read_rtc_register_and_store_result RTC_MINUTES
read_rtc_register_and_store_result RTC_SECONDS
ENDM
subtest_disabling_timer_does_not_reset_counter: MACRO
set_rtc_register RTC_SECONDS, 0
delay HALF_SECOND
set_rtc_register RTC_DAYS_HIGH, $40 ; disable
delay ONE_SECOND
set_rtc_register RTC_DAYS_HIGH, $00 ; enable
delay \1 ; wait another half second +/- tolerance
latch_rtc_data
; total elapsed time is 2 seconds, but seconds should read back as 1
; because the timer was disabled for 1 second of that.
read_rtc_register_and_store_result RTC_SECONDS
ENDM
subtest_write_and_read: MACRO
set_rtc_register RTC_DAYS_HIGH, $40
set_rtc_register \1, \2
latch_rtc_data
read_rtc_register_and_store_result \1
set_rtc_register RTC_DAYS_HIGH, $00
ENDM
SECTION "run-test", ROM0
RunTest::
ld a, $0a
ld [$0000], a
set_rtc_register RTC_DAYS_HIGH, $00
TestSecondsIncrement::
subtest_seconds (ONE_SECOND + TOLERANCE)
subtest_seconds (ONE_SECOND - TOLERANCE)
TestMinutesWriteDoesNotResetCounter::
subtest_minutes_write_does_not_reset_counter (HALF_SECOND + TOLERANCE)
subtest_minutes_write_does_not_reset_counter (HALF_SECOND - TOLERANCE)
TestRegisterOverflows::
subtest_overflow (ONE_SECOND + TOLERANCE), $40, 0, 0, 0
subtest_overflow (ONE_SECOND + TOLERANCE), 0, 0, 0, 0
subtest_overflow (ONE_SECOND + TOLERANCE), 0, 0, 0, 59
subtest_overflow (ONE_SECOND + TOLERANCE), 0, 0, 23, 59
subtest_overflow (ONE_SECOND + TOLERANCE), 0, 255, 23, 59
subtest_overflow (ONE_SECOND + TOLERANCE), 1, 255, 23, 59
TestLatchingDoesNotResetCounter::
subtest_latching_does_not_reset_counter (HALF_SECOND + TOLERANCE)
subtest_latching_does_not_reset_counter (HALF_SECOND - TOLERANCE)
TestDisablingTimerDoesNotResetCounter::
subtest_disabling_timer_does_not_reset_counter (HALF_SECOND + TOLERANCE)
subtest_disabling_timer_does_not_reset_counter (HALF_SECOND - TOLERANCE)
TestSecondsWriteDoesNotDiscardElapsedTime::
set_rtc_register RTC_SECONDS, 59
set_rtc_register RTC_MINUTES, 0
delay ((ONE_SECOND * 2) + TOLERANCE)
set_rtc_register RTC_SECONDS, 0
latch_rtc_data
read_rtc_register_and_store_result RTC_MINUTES
read_rtc_register_and_store_result RTC_SECONDS
TestWrittenValueIsNotReadableUntilLatched::
set_rtc_register RTC_DAYS_HIGH, $40
set_rtc_register RTC_MINUTES, $10
latch_rtc_data
set_rtc_register RTC_MINUTES, $20
read_rtc_register_and_store_result RTC_MINUTES
latch_rtc_data
read_rtc_register_and_store_result RTC_MINUTES
set_rtc_register RTC_DAYS_HIGH, $00
TestOutOfBoundsWriteAndIncrement::
set_rtc_register RTC_DAYS_HIGH, $40
set_rtc_register RTC_HOURS, $00
set_rtc_register RTC_MINUTES, $3F
set_rtc_register RTC_SECONDS, 59
set_rtc_register RTC_DAYS_HIGH, $00
delay ONE_SECOND + TOLERANCE
latch_rtc_data
read_rtc_register_and_store_result RTC_HOURS
read_rtc_register_and_store_result RTC_MINUTES
TestRegisterSizes::
subtest_write_and_read RTC_DAYS_HIGH, $FF
subtest_write_and_read RTC_HOURS, $FF
subtest_write_and_read RTC_MINUTES, $FF
subtest_write_and_read RTC_SECONDS, $FF
xor a
ld [$0000], a
ret
<|start_filename|>src/ppu/m3_scx_low_3_bits.asm<|end_filename|>
; Copyright (C) 2018 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
; Tests how late SCX can be written and still have its lowest 3 bits
; affect the rendering. The lowest 3 bits appear to be read at the start of
; the "B" of the first "B01s" read cycle.
; Initiated by STAT mode 2 LCDC interrupt in a field of NOPs.
INCLUDE "inc/hardware.inc"
INCLUDE "inc/utils.asm"
SECTION "wram", WRAM0
counter::
ds 1
SECTION "vblank", ROM0[$40]
jp vblank_handler
SECTION "lcdc", ROM0[$48]
jp hl
SECTION "boot", ROM0[$100]
nop
jp main
SECTION "main", ROM0[$150]
main::
di
ld sp, $fffe
xor a
ld [counter], a
call reset_registers
call reset_oam
; select mode 2 lcdc interrupt
ld a, $20
ldh [rSTAT], a
; enable vblank and lcdc interrupts
ld a, $03
ldh [rIE], a
; background is filled with spaces
ld a, 0
call fill_vram_9800
; fill the last column of the background with (R) tiles
ld hl, $9800 + 19
ld de, 32
ld a, $19
ld c, 18
.loop:
ld [hl], a
add hl, de
dec c
jr nz, .loop
ld a, LCDCF_ON | LCDCF_BG8000 | LCDCF_BG9800 | LCDCF_BGON
ldh [rLCDC], a
ld a, $fc
ldh [rBGP], a
; set hl so we can jump to it later
ld hl, lcdc_handler
ld c, LOW(rSCX)
; enable interrupts
ei
nop_slide:
REPT 1200
nop
ENDR
jp nop_slide
vblank_handler::
xor a
ldh [rSCX], a
; let it run for 10 frames
ld a, [counter]
inc a
cp 10
jp nz, .continue
; source code breakpoint - good time to take a screenshot to compare
ld b,b
.continue:
ld [counter], a
reti
lcdc_handler::
; 20 cycles interrupt dispatch + 4 cycles to jump here: 24
; set SCX to 0
xor a
ld [c], a
; delay 4 t-cycles on the first 72 rows of the screen,
; causing the SCX = 2 write to fail.
ldh a, [rLY]
cp $48
jr c, .delay
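; a taken jr costs 12 T-cycles vs 8 when not taken, so branching to the
; immediately following instruction adds 4 T-cycles only when LY < $48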
.delay:
nop
nop
; set SCX to 2
ld a, 2
ld [c], a
; reset the return address to the top of the nops loop
pop de
ei
jp nop_slide
<|start_filename|>src/ppu/m3_lcdc_win_map_change.asm<|end_filename|>
; Copyright (C) 2018 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
; Sets bit 6 (WIN_MAP) of LCDC register during mode 3 with sprites
; at different X coordinates
; Initiated by STAT mode 2 LCDC interrupt in a field of NOPs.
INCLUDE "inc/hardware.inc"
INCLUDE "inc/utils.asm"
SECTION "wram", WRAM0
counter::
ds 1
SECTION "vblank", ROM0[$40]
jp vblank_handler
SECTION "lcdc", ROM0[$48]
jp lcdc_handler
SECTION "boot", ROM0[$100]
nop
jp main
SECTION "main", ROM0[$150]
main::
di
ld sp, $fffe
xor a
ld [counter], a
call reset_registers
call reset_oam
; select mode 2 lcdc interrupt
ld a, $20
ldh [rSTAT], a
; enable vblank and lcdc interrupts
ld a, $03
ldh [rIE], a
; map at $9800 is filled with 0
ld a, $0
call fill_vram_9800
; map at $9c00 is filled with 1
ld a, $1
call fill_vram_9c00
; white tile at index 0
xor a
ld c, 16
ld hl, $9000
.tile_loop:
ld [hl+], a
dec c
jr nz, .tile_loop
; black tile at index 1
ld a, $ff
ld c, 16
.tile_loop2:
ld [hl+], a
dec c
jr nz, .tile_loop2
; use the (r) logo as a sprite
ld hl, sprite_data
ld c, 72 ; 18 sprites * 4
call oam_copy
; turn the screen on, $9800-$9BFF window tile map, window on, bg tile data $8800-$97FF,
; bg tile map $9800-$9BFF, obj size 8*8, obj display on, bg display on
ld b, $a3
; c has the same value, but with bit 6 set
ld c, b
set 6, c
ld a, 0
ldh [rWY], a
ld a, 7
ldh [rWX], a
ld a, $ff
ldh [rOBP0], a
ld a, $e4
ldh [rBGP], a
; load hl with address of LCDC register
ld hl, rLCDC
; set initial value
ld [hl], b
; enable interrupts
ei
nop_slide:
REPT 1200
nop
ENDR
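; just in case we slide off the end
jp nop_slide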
vblank_handler::
; let it run for 10 frames
ld a, [counter]
inc a
cp 10
jp nz, .continue
; source code breakpoint - good time to take a screenshot to compare
ld b,b
.continue:
ld [counter], a
reti
lcdc_handler::
; 20 cycles interrupt dispatch + 12 cycles to jump here: 32
line_0_fix
REPT 9
nop
ENDR
; set the new value: 8 cycles
ld [hl], c
; restore old value
ld [hl], b
; reset the return address to the top of the nops loop
pop de
ld de, nop_slide
push de
reti
sprite_data::
DB $10, 00, $19, 0
DB $18, 01, $19, 0
DB $20, 02, $19, 0
DB $28, 03, $19, 0
DB $30, 04, $19, 0
DB $38, 05, $19, 0
DB $40, 06, $19, 0
DB $48, 07, $19, 0
DB $50, 08, $19, 0
DB $58, 09, $19, 0
DB $60, 10, $19, 0
DB $68, 11, $19, 0
DB $70, 12, $19, 0
DB $78, 13, $19, 0
DB $80, 14, $19, 0
DB $88, 15, $19, 0
DB $90, 16, $19, 0
DB $98, 17, $19, 0
<|start_filename|>inc/lcdc_stat_int_base.asm<|end_filename|>
; Copyright (C) 2019 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
INCLUDE "inc/hardware.inc"
INCLUDE "inc/utils.asm"
SECTION "wram", WRAM0
counter::
ds 1
SECTION "vblank_interrupt", ROM0[$40]
jp _vblank_handler
SECTION "lcdc_stat_interrupt", ROM0[$48]
jp _lcdc_stat_int_handler
SECTION "boot", ROM0[$100]
nop
jp main
SECTION "main", ROM0[$150]
main::
di
ld sp, $fffe
xor a
ld [counter], a
call reset_registers
call reset_oam
call reset_tile_maps
call copy_font
call set_default_dmg_palettes
call set_default_cgb_palettes
ld hl, aligned_sprite_data
ld c, aligned_sprite_data.end - aligned_sprite_data
call oam_copy
init_lcdc_stat_int_test
ld a, STATF_MODE10 ; mode 2 - OAM scan
ldh [rSTAT], a
ld a, IEF_VBLANK | IEF_LCDC
ldh [rIE], a
xor a
ldh [rIF], a
ei
jp nop_slide
_vblank_handler::
; let it run for 10 frames
ld a, [counter]
inc a
ld [counter], a
cp 10
jr nz, .skip
; source code breakpoint - good time to take a screenshot to compare
ld b,b
.skip:
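; discard the interrupt return address pushed by dispatch and
; resume the nop slide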
add sp, 2
ei
jp nop_slide
_lcdc_stat_int_handler::
line_0_fix
; don't do anything after line 64
cp 64
jr nc, .skip
lcdc_stat_int
.skip:
add sp, 2
ei
jp nop_slide
init_bg_maps_alphabetical_9800::
ld a, 1
ld [rVBK], a
; background is filled with palette 1 on 8 rows
ld de, 8
ld hl, $9800
ld b, 1
.attr_outer:
ld c, 24
ld a, b
.attr_inner:
ld [hl+], a
dec c
jr nz, .attr_inner
add hl, de
bit 0, h ; bit 0 will be set when value of h register is $9900
jr z, .attr_outer
xor a
ld [rVBK], a
; background is filled with 'ABC...' on 8 rows
ld de, 8
ld hl, $9800
ld b, "A"
.tile_outer:
ld c, 24
ld a, b
.tile_inner:
ld [hl+], a
inc a
dec c
jr nz, .tile_inner
add hl, de
bit 0, h ; bit 0 will be set when value of h register is $9900
jr z, .tile_outer
ret
init_bg_maps_alphabetical_9c00::
ld a, 1
ld [rVBK], a
; background is filled with palette 2 on 8 rows
ld de, 8
ld hl, $9c00
ld b, 2
.attr_outer:
ld c, 24
ld a, b
.attr_inner:
ld [hl+], a
dec c
jr nz, .attr_inner
add hl, de
bit 0, h ; bit 0 will be set when value of h register is $9d00
jr z, .attr_outer
xor a
ld [rVBK], a
; background is filled with 'ZYX...' on 8 rows
ld de, 8
ld hl, $9c00
ld b, "Z"
.tile_outer:
ld c, 24
ld a, b
.tile_inner:
ld [hl+], a
dec a
dec c
jr nz, .tile_inner
add hl, de
bit 0, h ; bit 0 will be set when value of h register is $9d00
jr z, .tile_outer
ret
set_default_dmg_palettes::
ld a, $e4
ldh [rBGP], a
ldh [rOBP0], a
ldh [rOBP1], a
ret
set_default_cgb_palettes::
ld hl, cgb_background_palette
ld b, cgb_background_palette.end - cgb_background_palette
ld a, $80
ldh [rBCPS], a
call copy_bg_color_palette_data
ld hl, cgb_object_palette
ld b, cgb_object_palette.end - cgb_object_palette
ld a, $80
ldh [rOCPS], a
call copy_obj_color_palette_data
ret
nop_slide:
REPT 2400
nop
ENDR
; just in case we slide off the end
jp nop_slide
aligned_sprite_data::
DB $10, 1, " ", 0
DB $10, 10, "0", 0 ; 6 + 4 + 6 + 3 = 19
DB $18, 1, " ", 0
DB $18, 11, "1", 0 ; 6 + 4 + 6 + 2 = 18
DB $20, 1, " ", 0
DB $20, 12, "2", 0 ; 6 + 4 + 6 + 1 = 17
DB $28, 1, " ", 0
DB $28, 13, "3", 0 ; 6 + 4 + 6 + 0 = 16
DB $30, 5, " ", 0
DB $30, 10, "4", 0 ; 6 + 0 + 6 + 3 = 15
DB $38, 5, " ", 0
DB $38, 11, "5", 0 ; 6 + 0 + 6 + 2 = 14
DB $40, 5, " ", 0
DB $40, 12, "6", 0 ; 6 + 0 + 6 + 1 = 13
DB $48, 5, " ", 0
DB $48, 13, "7", 0 ; 6 + 0 + 6 + 0 = 12
.end:
cgb_background_palette::
cgb_color 31, 31, 31
cgb_color 21, 21, 21
cgb_color 11, 11, 11
cgb_color 0, 0, 0
; bg palette 1 - gbr
cgb_color 31, 31, 31
cgb_color 0, 31, 0
cgb_color 31, 0, 0
cgb_color 0, 0, 31
; bg palette 2 - cga
cgb_color 31, 31, 0
cgb_color 31, 0, 31
cgb_color 0, 31, 31
cgb_color 0, 0, 0
.end:
cgb_object_palette::
; obj palette 0 - shades of red
cgb_color 0, 0, 0 ; transparent
cgb_color 31, 31, 0
cgb_color 21, 21, 0
cgb_color 11, 11, 0
REPT 4 * 7
; remaining entries black
cgb_color 0, 0, 0
ENDR
.end:
<|start_filename|>src/ppu/m3_window_timing.asm<|end_filename|>
; Copyright (C) 2018 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
; On each row, WX is set to the value of LY, and then the background palette
; is changed to black during mode 3.
; For rows with smaller values of WX, there are fewer white pixels visible due
; to the palette change happening after the 6 T-cycle window startup fetch.
; For rows with larger values of WX, there are more white pixels visible due to
; the palette change happening before the 6 T-cycle window startup fetch.
; The stair-step pattern is visible because the palette is changed during
; the 6 T-cycle window startup fetch; by the time the window pixels are
; pushed out, the palette has already been changed.
; Initiated by STAT mode 2 LCDC interrupt in a field of NOPs.
INCLUDE "inc/hardware.inc"
INCLUDE "inc/utils.asm"
SECTION "wram", WRAM0
counter::
ds 1
SECTION "vblank", ROM0[$40]
jp vblank_handler
SECTION "lcdc", ROM0[$48]
jp lcdc_handler
SECTION "boot", ROM0[$100]
nop
jp main
SECTION "main", ROM0[$150]
main::
di
ld sp, $fffe
xor a
ld [counter], a
call reset_registers
call reset_oam
; select mode 2 lcdc interrupt
ld a, $20
ldh [rSTAT], a
; enable vblank and lcdc interrupts
ld a, $03
ldh [rIE], a
; map at $9800 is filled with 0
ld a, $0
call fill_vram_9800
; map at $9c00 is filled with 1
ld a, $1
call fill_vram_9c00
; light grey tile at index 0
ld c, 8
ld hl, $9000
.tile_loop:
ld a, $ff
ld [hl+], a
xor a
ld [hl+], a
dec c
jr nz, .tile_loop
; black tile at index 1
ld a, $ff
ld c, 16
.tile_loop2:
ld [hl+], a
dec c
jr nz, .tile_loop2
; turn the screen on, $9C00-$9FFF window tile map, window on, tile data $8800-$97FF,
; bg tile map $9800-$9BFF, obj size 8*8, obj display off, bg display on
ld a, $e1
ld [rLCDC], a
ld a, 7
ldh [rWX], a
ld a, 0
ldh [rWY], a
ld a, $e4
ldh [rOBP0], a
; load hl with address of BGP register
ld hl, rBGP
ld b, 0
ld c, $ff
; set initial value
ld [hl], b
; enable interrupts
ei
nop_slide:
REPT 1200
nop
ENDR
jp nop_slide
vblank_handler::
; let it run for 10 frames
ld a, [counter]
inc a
cp 10
jp nz, .continue
; source code breakpoint - good time to take a screenshot to compare
ld b, b
.continue:
ld [counter], a
reti
lcdc_handler::
; 20 cycles interrupt dispatch + 12 cycles to jump here: 32
line_0_fix
ldh a, [rLY]
ldh [rWX], a
ld [hl], b
nop
; set the new value: 8 cycles
ld [hl], c
; reset the return address to the top of the nops loop
pop de
ld de, nop_slide
push de
reti
<|start_filename|>inc/utils.asm<|end_filename|>
; Copyright (C) 2018 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
SECTION "utils", ROMX, BANK[1]
INCLUDE "inc/old_skool_outline_thick.asm"
wait_vblank::
ldh a, [rLY]
cp $90
jr nz, wait_vblank
ret
reset_oam::
ld hl, $fe00
ld a, $ff
ld c, 160
.loop:
ld [hl+], a
dec c
jr nz, .loop
ret
reset_registers::
call wait_vblank
xor a
ldh [rLCDC], a
ldh [rIF], a
ldh [rSCX], a
ldh [rSCY], a
; position the window off screen
ld a, 150
ldh [rWY], a
ret
reset_vram::
ld hl, $9800
xor a
.loop:
ld [hl+], a
bit 2, h ; bit 2 will be set when value of h register is $9c
jr z, .loop
ret
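; Copy c bytes from hl to OAM at $fe00
; @param hl source address
; @param c  number of bytes to copy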
oam_copy::
ld de, $fe00
.loop:
ld a, [hl+]
ld [de], a
inc de
dec c
jr nz, .loop
ret
copy_font::
ld hl, oldskooloutlinethick_tile_data
ld de, $8000
.loop:
ld a, [hl+]
ld [de], a
inc de
bit 3, d ; bit 3 will be set when value of d register is $88
jr z, .loop
ret
fill_vram_9800::
ld hl, $9800
.loop:
ld [hl+], a
bit 2, h ; bit 2 will be set when value of h register is $9c
jr z, .loop
ret
fill_vram_9c00::
ld hl, $9c00
.loop:
ld [hl+], a
bit 5, h ; bit 5 will be set when value of h register is $a0
jr z, .loop
ret
fill_vram_8000::
ld hl, $8000
.loop:
ld [hl+], a
bit 4, h ; bit 4 will be set when value of h register is $90
jr z, .loop
ret
reset_tile_maps::
; select vram bank 1
ld a, 1
ldh [rVBK], a
; set palette 0 by default
ld a, 0
call fill_vram_9800
call fill_vram_9c00
; select vram bank 0
xor a
ldh [rVBK], a
; background defaults to " "
ld a, " "
call fill_vram_9800
call fill_vram_9c00
ret
; Input:
; A = value
; HL = destination
; BC = length in bytes
; Preserved:
; none
memset::
ld e, a
.loop:
ld a, e
ld [hl+], a
dec bc
ld a, b
or c ; loop until bc reaches zero (dec bc does not set flags)
jr nz, .loop
ret
; Input:
; HL = source
; DE = destination
; BC = length in bytes
; Preserved:
; none
memcpy::
.loop:
ld a, [hl+]
ld [de], a
inc de
dec bc
ld a, b
or c
jr nz, .loop
ret
; line 0 timing is different by 4 cycles, so take the branch (which
; costs 4 extra cycles) on every line except line 0
; 28 cycles (or 24 cycles when LY = 0)
line_0_fix: MACRO
ldh a, [rLY]
and a
jr nz, .target\@
.target\@
ENDM
; Output specified number of nops
; @param \1 Number of nops to output
nops: MACRO
REPT \1
nop
ENDR
ENDM
cgb_mode: MACRO
SECTION "cgb-mode", ROM0[$143]
db $80
ENDM
switch_speed:
xor a
ldh [rIE], a
ld a, $30
ldh [rP1], a
ld a, $01
ldh [rKEY1], a
stop
ret
; Delay for a specified number of M-cycles
; @param \1 Number of M-cycles to wait for
delay: MACRO
DELAY = (\1)
IF DELAY >= 100000
REPT DELAY / 100000
call Delay100000MCycles
ENDR
DELAY = DELAY % 100000
ENDC
IF DELAY >= 10000
call Delay10000MCycles - (3 * ((DELAY / 10000) - 1))
DELAY = DELAY % 10000
ENDC
IF DELAY >= 1000
call Delay1000MCycles - (3 * ((DELAY / 1000) - 1))
DELAY = DELAY % 1000
ENDC
IF DELAY >= 100
call Delay100MCycles - (3 * ((DELAY / 100) - 1))
DELAY = DELAY % 100
ENDC
IF DELAY >= 10
call Delay10MCycles - (3 * ((DELAY / 10) - 1))
DELAY = DELAY % 10
ENDC
IF DELAY > 0
nops DELAY
ENDC
ENDM
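; Each rung of the chain below is a single 3-byte call instruction, so
; `call DelayXMCycles - (3 * n)` enters the chain n rungs earlier and adds
; n extra multiples of that rung's delay. The Delay10MCycles base case is
; a lone ret: call (6 M-cycles) + ret (4 M-cycles) = 10 M-cycles.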
Delay100000MCycles::
call Delay10000MCycles
Delay90000MCycles::
call Delay10000MCycles
Delay80000MCycles::
call Delay10000MCycles
Delay70000MCycles::
call Delay10000MCycles
Delay60000MCycles::
call Delay10000MCycles
Delay50000MCycles::
call Delay10000MCycles
Delay40000MCycles::
call Delay10000MCycles
Delay30000MCycles::
call Delay10000MCycles
Delay20000MCycles::
call Delay10000MCycles
Delay10000MCycles::
call Delay1000MCycles
Delay9000MCycles::
call Delay1000MCycles
Delay8000MCycles::
call Delay1000MCycles
Delay7000MCycles::
call Delay1000MCycles
Delay6000MCycles::
call Delay1000MCycles
Delay5000MCycles::
call Delay1000MCycles
Delay4000MCycles::
call Delay1000MCycles
Delay3000MCycles::
call Delay1000MCycles
Delay2000MCycles::
call Delay1000MCycles
Delay1000MCycles::
call Delay100MCycles
Delay900MCycles::
call Delay100MCycles
Delay800MCycles::
call Delay100MCycles
Delay700MCycles::
call Delay100MCycles
Delay600MCycles::
call Delay100MCycles
Delay500MCycles::
call Delay100MCycles
Delay400MCycles::
call Delay100MCycles
Delay300MCycles::
call Delay100MCycles
Delay200MCycles::
call Delay100MCycles
Delay100MCycles::
call Delay10MCycles
Delay90MCycles::
call Delay10MCycles
Delay80MCycles::
call Delay10MCycles
Delay70MCycles::
call Delay10MCycles
Delay60MCycles::
call Delay10MCycles
Delay50MCycles::
call Delay10MCycles
Delay40MCycles::
call Delay10MCycles
Delay30MCycles::
call Delay10MCycles
Delay20MCycles::
call Delay10MCycles
Delay10MCycles::
ret
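; CGB palette colours are 15-bit BGR packed little-endian:
; bits 0-4 red, bits 5-9 green, bits 10-14 blue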
rgb_low_byte: MACRO
db low(\1 + (\2 << 5) + (\3 << 10))
ENDM
rgb_high_byte: MACRO
db high(\1 + (\2 << 5) + (\3 << 10))
ENDM
cgb_color: MACRO
rgb_low_byte \1, \2, \3
rgb_high_byte \1, \2, \3
ENDM
copy_obj_color_palette_data::
ld c, low(rOCPD)
jr copy_bg_color_palette_data.loop
copy_bg_color_palette_data::
ld c, low(rBCPD)
.loop:
ld a, [hl+]
ld [c], a
dec b
jr nz, .loop
ret
<|start_filename|>src/dma/hdma_during_halt-C.asm<|end_filename|>
; Copyright (C) 2020 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
; Tests whether HDMA runs during halt - it does not. The HDMA transfer occurs
; after exiting from halt.
; Verified results:
; pass: CGB, AGB 0/A/B/BE
; fail: DMG, MGB, SGB, SGB2
; untested: AGB AE
TITLE equs "hdma_during_halt-C"
REQUIRES_CGB = 1
INCLUDE "inc/base.asm"
SECTION "correct-results", ROMX
CorrectResults::
; testing with di + halt
DB $03, $04, $05, $06 ; LY
DB $c6, $c2, $c2, $c2 ; STAT
DB $01, $00, $ff, $ff ; HDMA5
DB $11, $be, $be, $be ; $980f
DB $22, $22, $ef, $ef ; $981f
; testing with ei + halt
DB $03, $04, $05, $06 ; LY
DB $c6, $c2, $c2, $c2 ; STAT
DB $01, $00, $ff, $ff ; HDMA5
DB $11, $be, $be, $ff ; $980f
DB $22, $22, $ef, $ff ; $981f
SECTION "source-data", WRAM0, ALIGN[8]
SourceData::
DS 32
SECTION "lcdc-stat", ROM0[$48]
jp hl
; @param \1 delay1 number of nops to delay before initialising HDMA transfer
; @param \2 address address to read after halt
sub_test_di: MACRO
; reset the results
ld a, $11
ld [$980f], a
ld a, $22
ld [$981f], a
; populate the source data
ld a, $be
ld [SourceData + $0f], a
ld a, $ef
ld [SourceData + $1f], a
ld a, 3
ldh [rLYC], a
ld a, STATF_LYC
ldh [rSTAT], a
ld a, IEF_LCDC
ldh [rIE], a
xor a
ldh [rIF], a
lcd_on
ld a, high(SourceData)
ldh [rHDMA1], a
ld a, low(SourceData)
ldh [rHDMA2], a
ld a, $98
ldh [rHDMA3], a
ld a, $00
ldh [rHDMA4], a
nops \1
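; HDMA5: bit 7 set selects H-blank DMA; low 7 bits = (length / 16) - 1,
; so $81 requests a 32 byte transfer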
ld a, $81 ; copy 32 bytes using H-blank DMA
ldh [rHDMA5], a
halt
ld a, [\2]
store_result
nops 101 - 9
ld a, [\2]
store_result
nops 101 - 9
ld a, [\2]
store_result
nops 101
ld a, [\2]
store_result
call LcdOffSafe
xor a
ldh [rLYC], a
ldh [rSTAT], a
ldh [rIF], a
ldh [rIE], a
ENDM
; @param \1 delay1 number of nops to delay before initialising HDMA transfer
; @param \2 address address to read after halt
sub_test_ei: MACRO
; reset the results
ld a, $11
ld [$980f], a
ld a, $22
ld [$981f], a
; populate the source data
ld a, $be
ld [SourceData + $0f], a
ld a, $ef
ld [SourceData + $1f], a
ld a, 3
ldh [rLYC], a
ld a, STATF_LYC
ldh [rSTAT], a
ld a, IEF_LCDC
ldh [rIE], a
lcd_on
ld a, high(SourceData)
ldh [rHDMA1], a
ld a, low(SourceData)
ldh [rHDMA2], a
ld a, $98
ldh [rHDMA3], a
ld a, $00
ldh [rHDMA4], a
nops \1
ld a, $81 ; copy 32 bytes using H-blank DMA
ldh [rHDMA5], a
xor a
ldh [rIF], a
ld hl, intr_handler\@
ei
nop
halt
intr_handler\@::
ld a, [\2]
store_result
nops 101 - 9
ld a, [\2]
store_result
nops 101 - 9
ld a, [\2]
store_result
nops 101
ld a, [\2]
store_result
; pop off the interrupt handler return address
pop af
call LcdOffSafe
xor a
ldh [rLYC], a
ldh [rSTAT], a
ldh [rIF], a
ldh [rIE], a
ENDM
SECTION "run-test", ROM0
RunTest::
di
sub_test_di 104, rLY
sub_test_di 104, rSTAT
sub_test_di 104, rHDMA5
sub_test_di 104, $980f
sub_test_di 104, $981f
sub_test_ei 104, rLY
sub_test_ei 104, rSTAT
sub_test_ei 104, rHDMA5
sub_test_ei 104, $980f
sub_test_ei 104, $981f
ret
<|start_filename|>src/ppu/m3_lcdc_obj_size_change.asm<|end_filename|>
; Copyright (C) 2018 <NAME> <<EMAIL>>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
; SOFTWARE.
; Toggles bit 2 of LCDC register (sprite size) during mode 3 with sprites
; at different X coordinates
; Initiated by STAT mode 2 LCDC interrupt in a field of NOPs.
INCLUDE "inc/hardware.inc"
SECTION "wram", WRAM0
counter::
ds 1
SECTION "vblank", ROM0[$40]
jp vblank_handler
SECTION "lcdc", ROM0[$48]
jp lcdc_handler
SECTION "boot", ROM0[$100]
nop
jp main
SECTION "main", ROM0[$150]
main::
di
ld sp, $fffe
xor a
ld [counter], a
call reset_registers
call reset_oam
; select mode 2 lcdc interrupt
ld a, $20
ldh [rSTAT], a
; enable vblank and lcdc interrupts
ld a, $03
ldh [rIE], a
call copy_font
; copy sprite data
ld hl, sprite_data
ld c, (sprite_data_end-sprite_data)
call oam_copy
; turn the screen on, $9800-$9BFF window tile map, window off, bg tile data $8800-$97FF,
; bg tile map $9800-$9BFF, obj size 8*16, obj display on, bg display on
ld a, $87
ld b, a
ldh [rLCDC], a
ld a, $e4
ldh [rOBP0], a
ld a, $00
ldh [rBGP], a
; load c with address of LCDC register
ld c, LOW(rLCDC)
; d contains lcdc value with obj size 8*8
ld d, $83
; enable interrupts
ei
nop_slide:
REPT 1200
nop
ENDR
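; just in case we slide off the end
jp nop_slide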
vblank_handler::
; let it run for 10 frames
ld a, [counter]
inc a
cp 10
jp nz, .continue
; source code breakpoint - good time to take a screenshot to compare
ld b,b
.continue:
ld [counter], a
reti
lcdc_handler::
; 20 cycles interrupt dispatch + 12 cycles to jump here: 32
REPT 14
nop
ENDR
; set 8*8 sprites
ld a, d
ld [c], a
nop
nop
nop
; set 8*16 sprites
ld a, b
ld [c], a
; set 8*8 sprites
ld a, d
ld [c], a
; set 8*16 sprites
ld a, b
ld [c], a
; reset the return address to the top of the nops loop
pop hl
ld hl, nop_slide
push hl
reti
sprite_data::
DB $10, 00, $4c, 0
DB $20, 01, $4c, 0
DB $30, 02, $4c, 0
DB $40, 03, $4c, 0
DB $50, 04, $4c, 0
DB $60, 05, $4c, 0
DB $70, 06, $4c, 0
DB $80, 07, $4c, 0
DB $90, 08, $4c, 0
DB $10, $10, $4c, 0
DB $20, $11, $4c, 0
DB $30, $12, $4c, 0
DB $40, $13, $4c, 0
DB $50, $14, $4c, 0
DB $60, $15, $4c, 0
DB $70, $16, $4c, 0
DB $80, $17, $4c, 0
DB $90, $18, $4c, 0
; flipped vertically
DB $10, $20, $4c, $40
DB $20, $21, $4c, $40
DB $30, $22, $4c, $40
DB $40, $23, $4c, $40
DB $50, $24, $4c, $40
DB $60, $25, $4c, $40
DB $70, $26, $4c, $40
DB $80, $27, $4c, $40
DB $90, $28, $4c, $40
sprite_data_end::
INCLUDE "inc/utils.asm"
<|start_filename|>tests/thread_tests.c<|end_filename|>
/* iso_alloc thread_tests.c
* Copyright 2021 - <EMAIL> */
#include "iso_alloc.h"
#include "iso_alloc_internal.h"
uint32_t allocation_sizes[] = {ZONE_16, ZONE_32, ZONE_64, ZONE_128,
ZONE_256, ZONE_512, ZONE_1024,
ZONE_2048, ZONE_4096, ZONE_8192};
uint32_t array_sizes[] = {16, 32, 64, 128, 256, 512, 1024,
2048, 4096, 8192, 16384, 32768};
/* This test can be repurposed for benchmarking
* against other allocators using LD_PRELOAD */
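/* e.g. (hypothetical paths and arguments):
 * LD_LIBRARY_PATH=build LD_PRELOAD=/path/to/other/allocator.so build/thread_tests 4 */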
#if MALLOC_PERF_TEST
#define alloc_mem malloc
#define calloc_mem calloc
#define realloc_mem realloc
#define free_mem free
#else
#define alloc_mem iso_alloc
#define calloc_mem iso_calloc
#define realloc_mem iso_realloc
#define free_mem iso_free
#endif
const int32_t ALLOC = 0;
const int32_t REALLOC = 1;
const int32_t CALLOC = 2;
uint32_t times;
void *allocate(void *_type) {
size_t array_size;
size_t allocation_size;
int32_t alloc_count = 0;
int32_t type = *((int32_t *) _type);
for(int o = 0; o < times; o++) {
for(int i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
for(int z = 0; z < sizeof(allocation_sizes) / sizeof(uint32_t); z++) {
array_size = array_sizes[i];
allocation_size = allocation_sizes[z];
void *p[array_size];
memset(p, 0x0, sizeof(p));
for(int y = 0; y < array_size; y++) {
if(allocation_size == 0) {
allocation_size = allocation_sizes[rand() % (sizeof(allocation_sizes) / sizeof(uint32_t))] + (rand() % 32);
}
void *d = NULL;
if(type == ALLOC) {
p[y] = alloc_mem(allocation_size);
} else if(type == REALLOC) {
d = (void *) alloc_mem(allocation_size / 2);
p[y] = realloc_mem(d, allocation_size);
} else if(type == CALLOC) {
p[y] = calloc_mem(1, allocation_size);
}
if(p[y] == NULL) {
LOG_AND_ABORT("Failed to allocate %ld bytes after %d total allocations", allocation_size, alloc_count);
}
alloc_count++;
memset(p[y], 0x41, allocation_size);
/* Randomly free some allocations */
if((rand() % 5) > 1) {
free_mem(p[y]);
p[y] = NULL;
}
}
/* Free the remaining allocations */
for(int r = 0; r < array_size; r++) {
if(p[r] != NULL) {
free_mem(p[r]);
}
}
}
}
}
return OK;
}
void run_test_threads() {
#if THREAD_SUPPORT
pthread_t t;
pthread_t tt;
pthread_t ttt;
pthread_create(&t, NULL, allocate, (void *) &ALLOC);
pthread_create(&tt, NULL, allocate, (void *) &REALLOC);
pthread_create(&ttt, NULL, allocate, (void *) &CALLOC);
pthread_join(t, NULL);
pthread_join(tt, NULL);
pthread_join(ttt, NULL);
#endif
}
int main(int argc, char *argv[]) {
if(argc != 2) {
times = 1;
} else {
times = atol(argv[1]);
}
run_test_threads();
iso_alloc_detect_leaks();
iso_verify_zones();
return OK;
}
<|start_filename|>tests/tests.cpp<|end_filename|>
/* iso_alloc tests.cpp
* Copyright 2021 - <EMAIL> */
#include <memory>
#include "iso_alloc.h"
#include "iso_alloc_internal.h"
using namespace std;
uint32_t allocation_sizes[] = {ZONE_16, ZONE_32, ZONE_64, ZONE_128,
ZONE_256, ZONE_512, ZONE_1024,
ZONE_2048, ZONE_4096, ZONE_8192};
uint32_t array_sizes[] = {16, 32, 64, 128, 256, 512, 1024,
2048, 4096, 8192, 16384, 32768, 65536};
int32_t alloc_count;
class Base {
public:
int32_t type;
char *str;
};
class Derived : Base {
public:
Derived(int32_t i) {
count = i;
type = count * count;
str = (char *) iso_alloc(1024);
memcpy(str, "AAAAA", 5);
}
~Derived() {
count = 0;
type = 0;
iso_free(str);
}
uint32_t count;
};
int allocate(size_t array_size, size_t allocation_size) {
void *p[array_size];
memset(p, 0x0, sizeof(p));
for(size_t i = 0; i < array_size; i++) {
if(allocation_size == 0) {
allocation_size = allocation_sizes[rand() % (sizeof(allocation_sizes) / sizeof(uint32_t))] + (rand() % 32);
}
p[i] = new uint8_t[allocation_size];
if(p[i] == NULL) {
LOG_AND_ABORT("Failed to allocate %ld bytes after %d total allocations", allocation_size, alloc_count);
}
alloc_count++;
/* Randomly free some allocations */
if((rand() % 2) == 1) {
delete[](uint8_t *) p[i];
p[i] = NULL;
}
}
/* Free the remaining allocations */
for(size_t i = 0; i < array_size; i++) {
if(p[i] != NULL) {
delete[](uint8_t *) p[i];
}
}
return OK;
}
int main(int argc, char *argv[]) {
char *a = (char *) iso_alloc(100);
iso_free(a);
auto d = std::make_unique<Derived>(100);
for(size_t i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
for(size_t z = 0; z < sizeof(allocation_sizes) / sizeof(uint32_t); z++) {
allocate(array_sizes[i], allocation_sizes[z]);
}
}
for(size_t i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
allocate(array_sizes[i], 0);
Base *b = new Base();
delete b;
auto d = std::make_unique<Derived>(i);
}
iso_verify_zones();
return 0;
}
<|start_filename|>android/jni/Android.mk<|end_filename|>
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_CFLAGS := -DTHREAD_SUPPORT=1 -pthread -DTHREAD_CACHE=1 \
-DPRE_POPULATE_PAGES=0 -DSMALL_MEM_STARTUP=0 -DSANITIZE_CHUNKS=0 \
-DFUZZ_MODE=0 -DPERM_FREE_REALLOC=0 -DDISABLE_CANARY=0 -Werror \
-pedantic -Wno-pointer-arith -Wno-gnu-zero-variadic-macro-arguments \
-Wno-format-pedantic -DMALLOC_HOOK=1 -fvisibility=hidden -std=c11 \
-DALLOC_SANITY=0 -DUNINIT_READ_SANITY=0 -DCPU_PIN=0 -DEXPERIMENTAL=0 \
-DUAF_PTR_PAGE=0 -DVERIFY_BIT_SLOT_CACHE=0 -DNAMED_MAPPINGS=1 -fPIC \
-shared -DDEBUG=1 -DLEAK_DETECTOR=1 -DMEM_USAGE=1 \
-g -ggdb3 -fno-omit-frame-pointer
LOCAL_SRC_FILES := ../../src/iso_alloc.c ../../src/iso_alloc_printf.c ../../src/iso_alloc_random.c \
../../src/iso_alloc_search.c ../../src/iso_alloc_interfaces.c ../../src/iso_alloc_profiler.c \
../../src/iso_alloc_sanity.c ../../src/malloc_hook.c
LOCAL_C_INCLUDES := ../../include/
LOCAL_MODULE := libisoalloc
include $(BUILD_SHARED_LIBRARY)
<|start_filename|>tests/unaligned_free.c<|end_filename|>
/* iso_alloc unaligned_free.c
* Copyright 2021 - <EMAIL> */
#include "iso_alloc.h"
#include "iso_alloc_internal.h"
int main(int argc, char *argv[]) {
void *p = iso_alloc(128);
p += 1;
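/* p no longer points to the start of the chunk, so the free below is
 * expected to be detected as an unaligned free */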
iso_free(p);
return OK;
}
<|start_filename|>src/iso_alloc_random.c<|end_filename|>
/* iso_alloc_random.c - A secure memory allocator
* Copyright 2021 - <EMAIL> */
/* Contributed by <NAME> (@oreparaz)
* https://github.com/struct/isoalloc/pull/5 */
#if __linux__
#include <linux/random.h>
#include <sys/syscall.h>
#elif __APPLE__
#include <Security/SecRandom.h>
#else
#error "unknown OS"
#endif
#include "iso_alloc_internal.h"
INTERNAL_HIDDEN uint64_t rand_uint64(void) {
uint64_t val = 0;
int ret = 0;
/* In modern versions of glibc (>=2.25) we can call getrandom(),
* but older versions of glibc are still in use as of writing this.
* Use the raw system call as a lower common denominator.
* We give up on checking the return value. The alternative would be
* to crash. We prefer here to keep going with degraded randomness. */
#if __linux__
ret = syscall(SYS_getrandom, &val, sizeof(val), GRND_NONBLOCK) != sizeof(val);
#elif __APPLE__
ret = SecRandomCopyBytes(kSecRandomDefault, sizeof(val), &val);
#endif
#if ABORT_NO_ENTROPY
if(ret != 0) {
LOG_AND_ABORT("Unable to gather enough entropy");
}
#endif
return val;
}
<|start_filename|>tests/uaf.c<|end_filename|>
/* iso_alloc uaf.c
* Copyright 2021 - <EMAIL> */
#include "iso_alloc.h"
#include "iso_alloc_internal.h"
#if defined(UAF_PTR_PAGE) && !defined(ALLOC_SANITY)
/* This test should be run manually. You need to enable UAF_PTR_PAGE
* and then disable the sampling logic in iso_alloc. */
typedef struct test {
char *str;
} test_t;
int main(int argc, char *argv[]) {
void *str = iso_alloc(32);
test_t *test = (test_t *) iso_alloc(1024);
test->str = str;
memcpy(str, "a string!", 9);
iso_free(str);
/* Dereference a pointer that should have been
* detected and overwritten with UAF_PTR_PAGE */
LOG("Attempting to dereference test->str.\nWe should fault on %x", UAF_PTR_PAGE_ADDR);
LOG("%s", test->str);
iso_free(test);
return OK;
}
#else
int main(int argc, char *argv[]) {
return 0;
}
#endif
<|start_filename|>include/iso_alloc.h<|end_filename|>
/* iso_alloc.h - A secure memory allocator
* Copyright 2021 - <EMAIL> */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#ifndef EXTERNAL_API
#define EXTERNAL_API __attribute__((visibility("default")))
#endif
#define NO_DISCARD __attribute__((warn_unused_result))
typedef void iso_alloc_zone_handle;
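/* Illustrative usage sketch (names exactly as declared below):
 *
 * void *p = iso_alloc(64);
 * iso_free(p);
 *
 * iso_alloc_zone_handle *z = iso_alloc_new_zone(128);
 * void *q = iso_alloc_from_zone(z, 128);
 * iso_free(q);
 * iso_alloc_destroy_zone(z);
 */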
#if CPP_SUPPORT
extern "C" {
#endif
EXTERNAL_API NO_DISCARD void *iso_alloc(size_t size);
EXTERNAL_API NO_DISCARD void *iso_calloc(size_t nmemb, size_t size);
EXTERNAL_API void iso_free(void *p);
EXTERNAL_API void iso_free_permanently(void *p);
EXTERNAL_API NO_DISCARD void *iso_realloc(void *p, size_t size);
EXTERNAL_API size_t iso_chunksz(void *p);
EXTERNAL_API NO_DISCARD char *iso_strdup(const char *str);
EXTERNAL_API NO_DISCARD char *iso_strdup_from_zone(iso_alloc_zone_handle *zone, const char *str);
EXTERNAL_API NO_DISCARD char *iso_strndup(const char *str, size_t n);
EXTERNAL_API NO_DISCARD char *iso_strndup_from_zone(iso_alloc_zone_handle *zone, const char *str, size_t n);
EXTERNAL_API NO_DISCARD iso_alloc_zone_handle *iso_alloc_from_zone(iso_alloc_zone_handle *zone, size_t size);
EXTERNAL_API NO_DISCARD iso_alloc_zone_handle *iso_alloc_new_zone(size_t size);
EXTERNAL_API void iso_alloc_destroy_zone(iso_alloc_zone_handle *zone);
EXTERNAL_API void iso_alloc_protect_root();
EXTERNAL_API void iso_alloc_unprotect_root();
EXTERNAL_API uint64_t iso_alloc_detect_zone_leaks(iso_alloc_zone_handle *zone);
EXTERNAL_API uint64_t iso_alloc_detect_leaks();
EXTERNAL_API uint64_t iso_alloc_zone_mem_usage(iso_alloc_zone_handle *zone);
EXTERNAL_API uint64_t iso_alloc_mem_usage();
EXTERNAL_API void iso_verify_zones();
EXTERNAL_API void iso_verify_zone(iso_alloc_zone_handle *zone);
#if EXPERIMENTAL
EXTERNAL_API void iso_alloc_search_stack(void *p);
#endif
#if CPP_SUPPORT
}
#endif
<|start_filename|>tests/big_double_free.c<|end_filename|>
/* iso_alloc big_double_free.c
* Copyright 2021 - <EMAIL> */
#include "iso_alloc.h"
#include "iso_alloc_internal.h"
int main(int argc, char *argv[]) {
void *p = iso_alloc(SMALL_SZ_MAX + 1);
iso_free(p);
void *z = iso_alloc(SMALL_SZ_MAX + 1);
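/* the second free of p below is an intentional double free which
 * IsoAlloc is expected to detect */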
iso_free(p);
iso_free(z);
return OK;
}
<|start_filename|>tests/zero_alloc.c<|end_filename|>
/* iso_alloc zero_alloc.c
* Copyright 2021 - <EMAIL> */
#include "iso_alloc.h"
#include "iso_alloc_internal.h"
int main(int argc, char *argv[]) {
void *p = iso_alloc(0);
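/* with NO_ZERO_ALLOCATIONS=1 (see the Makefile) zero-size allocations
 * are guarded; the write below is expected to be caught */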
memcpy(p, "0x41", 1);
iso_free(p);
return 0;
}
<|start_filename|>Makefile<|end_filename|>
## Isolation Alloc Makefile
## Copyright 2021 - <EMAIL>
CC = clang
CXX = clang++
## Security flags can affect performance
## SANITIZE_CHUNKS - Clear user chunks upon free
## FUZZ_MODE - Call verify_all_zones upon alloc/free, never reuse custom zones
## PERM_FREE_REALLOC - Permanently free any realloc'd chunk
## DISABLE_CANARY - Disables the use of canaries, improves performance
SECURITY_FLAGS = -DSANITIZE_CHUNKS=0 -DFUZZ_MODE=0 -DPERM_FREE_REALLOC=0 -DDISABLE_CANARY=0
## Enable abort() when isoalloc can't gather enough entropy.
ABORT_NO_ENTROPY = -DABORT_NO_ENTROPY=1
## This enables Address Sanitizer support for manually
## poisoning and unpoisoning zones. It adds significant
## performance and memory overhead
## This is slow, and it's incompatible with other sanitizers
#ENABLE_ASAN = -fsanitize=address -DENABLE_ASAN=1
## Enable memory sanitizer to catch uninitialized reads.
## This is slow, and it's incompatible with other sanitizers
#ENABLE_MSAN = -fsanitize=memory -fsanitize-memory-track-origins
## Enable undefined behavior sanitizer to catch undefined behavior.
## This is slow, and it's incompatible with other sanitizers
#ENABLE_UBSAN = -fsanitize=undefined
## Enable thread sanitizer. The slowest sanitizer of them
## all. But useful for finding thread related data race issues
## in the allocator in code paths that use atomic_flag
#ENABLE_TSAN = -fsanitize=thread
SANITIZER_SUPPORT = $(ENABLE_ASAN) $(ENABLE_MSAN) $(ENABLE_UBSAN) $(ENABLE_TSAN)
## Support for threads adds a performance overhead
## You can safely disable it here if you know your
## program does not require concurrent access
## to the IsoAlloc APIs
## THREAD_CACHE - Enables thread zone cache
THREAD_SUPPORT = -DTHREAD_SUPPORT=1 -pthread -DTHREAD_CACHE=1
## This tells IsoAlloc to only start with 4 default zones.
## If you set it to 0 IsoAlloc will startup with 10. The
## performance penalty for setting it to 0 is a one time
## startup cost but more memory may be wasted. See the
## comments in iso_alloc_internal.h for modifying this
STARTUP_MEM_USAGE = -DSMALL_MEM_STARTUP=0
## Instructs the kernel (via mmap) to prepopulate
## page tables which will reduce page faults and
## sometimes improve performance. If you're using
## IsoAlloc for small short lived programs you probably
## want to disable this. This is ignored on MacOS
PRE_POPULATE_PAGES = -DPRE_POPULATE_PAGES=0
## Enable some functionality that like IsoAlloc internals
## for tests that need to verify security properties
UNIT_TESTING = -DUNIT_TESTING=1
## Enable the malloc/free and new/delete hooks
MALLOC_HOOK = -DMALLOC_HOOK=1
## Enable the built-in heap profiler. When this is enabled
## IsoAlloc will write a file to disk upon exit of the
## program. This file encodes the heap usage patterns of
## the target. This file can be consumed by the profiler
## CLI utility. See PROFILER.md for the format of this file
#HEAP_PROFILER = -DHEAP_PROFILER=1 -fno-omit-frame-pointer
## Enable CPU pinning support on a per-zone basis. This is
## a minor security feature which introduces an allocation
## isolation property that is defined by CPU core. See the
## README for more detailed information. (Linux only)
CPU_PIN = -DCPU_PIN=0
## Enable the allocation sanity feature. This works a lot
## like GWP-ASAN does. It samples calls to iso_alloc and
## randomly swaps them out for raw page allocations that
## are surrounded by guard pages. These pages are unmapped
## upon free. Much like GWP-ASAN this is designed to be
## used in production builds and should not incur too
## much of a performance penalty
ALLOC_SANITY = -DALLOC_SANITY=0
## Enable the userfaultfd based uninitialized read detection
## feature. This samples calls to malloc, and allocates raw
## pages of memory with mmap which are registered with the
## userfaultfd subsystem. We detect uninitialized reads by
## looking for the first read access of that page before a
## previous call to write. Think of it as GWP-ASAN but for
## uninitialized reads. Enabling this feature does incur a
## performance penalty. This requires that both ALLOC_SANITY
## and THREAD_SUPPORT are enabled. Linux only
UNINIT_READ_SANITY = -DUNINIT_READ_SANITY=0
## Enable a sampling mechanism that searches for references
## to a chunk currently being freed. The search only overwrites
## the first reference to that chunk because searching all
## zones is very slow.
UAF_PTR_PAGE = -DUAF_PTR_PAGE=0
## Unmap user and bitmap in the destructor. You probably
## don't want this as theres no guarantee the IsoAlloc
## destructor will be called last and other destructors
## that call free will segfault
ISO_DTOR_CLEANUP = -DISO_DTOR_CLEANUP=0
## Verifies the free bit slot cache does not contain duplicate
## entries which might lead to IsoAlloc handing out an in-use
## chunk to a caller. This is a slow search that has a small
## performance penalty
VERIFY_BIT_SLOT_CACHE = -DVERIFY_BIT_SLOT_CACHE=0
## Shuffles the free bit slot cache upon creation.
## This leads to a 3-4x performance slow down!
SHUFFLE_BIT_SLOT_CACHE = -DSHUFFLE_BIT_SLOT_CACHE=0
## Enable experimental features that are not guaranteed to
## compile, or introduce stability and performance bugs
EXPERIMENTAL = -DEXPERIMENTAL=0
## These control log, memory leak, and memory usage code
## In a release build you probably want them all to be 0
DEBUG_LOG_FLAGS = -DDEBUG=1 -DLEAK_DETECTOR=1 -DMEM_USAGE=1
## On Android we use prctl to name mappings so they are
## visible in /proc/pid/maps - But the Android build does
## not use this Makefile. You want to modify Android.mk
NAMED_MAPPINGS = -DNAMED_MAPPINGS=0
## Abort when the allocator cannot return a valid chunk
ABORT_ON_NULL = -DABORT_ON_NULL=0
## Enable protection against misusing 0 sized allocations
NO_ZERO_ALLOCATIONS = -DNO_ZERO_ALLOCATIONS=1
UNAME := $(shell uname)
ifeq ($(UNAME), Darwin)
OS_FLAGS = -framework Security
CPU_PIN = ""
endif
ifeq ($(UNAME), Linux)
STRIP = strip -s $(BUILD_DIR)/libisoalloc.so
endif
HOOKS = $(MALLOC_HOOK)
OPTIMIZE = -O2 -fstrict-aliasing -Wstrict-aliasing
COMMON_CFLAGS = -Wall -Iinclude/ $(THREAD_SUPPORT) $(PRE_POPULATE_PAGES) $(STARTUP_MEM_USAGE)
BUILD_ERROR_FLAGS = -Wno-pointer-arith -Wno-gnu-zero-variadic-macro-arguments -Wno-format-pedantic
ifneq ($(CC), gcc)
BUILD_ERROR_FLAGS := $(BUILD_ERROR_FLAGS) -Werror -pedantic
else
BUILD_ERROR_FLAGS := $(BUILD_ERROR_FLAGS) -Wno-attributes -Wno-unused-variable
endif
CFLAGS = $(COMMON_CFLAGS) $(SECURITY_FLAGS) $(BUILD_ERROR_FLAGS) $(HOOKS) $(HEAP_PROFILER) -fvisibility=hidden \
-std=c11 $(SANITIZER_SUPPORT) $(ALLOC_SANITY) $(UNINIT_READ_SANITY) $(CPU_PIN) $(EXPERIMENTAL) $(UAF_PTR_PAGE) \
$(VERIFY_BIT_SLOT_CACHE) $(NAMED_MAPPINGS) $(ABORT_ON_NULL) $(NO_ZERO_ALLOCATIONS) $(ABORT_NO_ENTROPY) \
$(ISO_DTOR_CLEANUP) $(SHUFFLE_BIT_SLOT_CACHE)
CXXFLAGS = $(COMMON_CFLAGS) -DCPP_SUPPORT=1 -std=c++17 $(SANITIZER_SUPPORT) $(HOOKS)
EXE_CFLAGS = -fPIE
GDB_FLAGS = -g -ggdb3 -fno-omit-frame-pointer
PERF_FLAGS = -pg -DPERF_TEST_BUILD=1
LIBRARY = -fPIC -shared
SRC_DIR = src
C_SRCS = $(SRC_DIR)/*.c
CXX_SRCS = $(SRC_DIR)/*.cpp
ISO_ALLOC_PRINTF_SRC = $(SRC_DIR)/iso_alloc_printf.c
BUILD_DIR = build
LDFLAGS = -L$(BUILD_DIR) -lisoalloc
all: library tests
## Build a release version of the library
library: clean
@echo "make library"
$(CC) $(CFLAGS) $(LIBRARY) $(OPTIMIZE) $(OS_FLAGS) $(C_SRCS) -o $(BUILD_DIR)/libisoalloc.so
$(STRIP)
## Build a debug version of the library
library_debug: clean
@echo "make library debug"
$(CC) $(CFLAGS) $(LIBRARY) $(OS_FLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) $(C_SRCS) -o $(BUILD_DIR)/libisoalloc.so
## Build a debug version of the library
## specifically for unit tests
library_debug_unit_tests: clean
@echo "make library_debug_unit_tests"
$(CC) $(CFLAGS) $(LIBRARY) $(OS_FLAGS) $(UNIT_TESTING) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) $(C_SRCS) -o $(BUILD_DIR)/libisoalloc.so
## Builds a debug version of the library with scan-build
## Requires scan-build is in your PATH
analyze_library_debug: clean
@echo "make analyze_library_debug"
scan-build $(CC) $(CFLAGS) $(LIBRARY) $(OS_FLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) $(C_SRCS) -o $(BUILD_DIR)/libisoalloc.so
## Build a debug version of the library
library_debug_no_output: clean
@echo "make library_debug_no_output"
$(CC) $(CFLAGS) $(LIBRARY) $(OS_FLAGS) $(GDB_FLAGS) $(C_SRCS) -o $(BUILD_DIR)/libisoalloc.so
## C++ Support - Build object files for C code
c_library_objects:
@echo "make c_library_objects"
$(CC) $(CFLAGS) $(OPTIMIZE) $(C_SRCS) -fPIC -c
mv *.o $(BUILD_DIR)
## C++ Support - Build debug object files for C code
c_library_objects_debug:
@echo "make c_library_objects_debug"
$(CC) $(CFLAGS) $(C_SRCS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) -fPIC -c
mv *.o $(BUILD_DIR)
## C++ Support - Build the library with C++ support
cpp_library: clean c_library_objects
@echo "make cpp_library"
$(CXX) $(CXXFLAGS) $(OPTIMIZE) $(LIBRARY) $(OS_FLAGS) $(CXX_SRCS) $(BUILD_DIR)/*.o -o $(BUILD_DIR)/libisoalloc.so
$(STRIP)
cpp_library_debug: clean c_library_objects_debug
@echo "make cpp_library_debug"
$(CXX) $(CXXFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) $(LIBRARY) $(OS_FLAGS) $(CXX_SRCS) $(BUILD_DIR)/*.o -o $(BUILD_DIR)/libisoalloc.so
## Build a debug version of the unit test
tests: clean library_debug_unit_tests
@echo "make library_debug_unit_tests"
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/uaf.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/uaf
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/interfaces_test.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/interfaces_test $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/thread_tests.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/thread_tests $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) $(UNIT_TESTING) tests/big_canary_test.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/big_canary_test $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/tests.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/tests $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/big_tests.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/big_tests $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/double_free.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/double_free $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/big_double_free.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/big_double_free $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/heap_overflow.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/heap_overflow $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/heap_underflow.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/heap_underflow $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/leaks_test.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/leaks_test $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/wild_free.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/wild_free $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/unaligned_free.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/unaligned_free $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/incorrect_chunk_size_multiple.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/incorrect_chunk_size_multiple $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/zero_alloc.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/zero_alloc $(LDFLAGS)
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) tests/uninit_read.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/uninit_read $(LDFLAGS)
utils/run_tests.sh
fuzz_test: clean library_debug_unit_tests
@echo "make fuzz_test"
$(CC) $(CFLAGS) $(EXE_CFLAGS) $(DEBUG_LOG_FLAGS) $(GDB_FLAGS) -DNEVER_REUSE_ZONES=1 tests/alloc_fuzz.c $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/alloc_fuzz $(LDFLAGS)
LD_LIBRARY_PATH=build/ build/alloc_fuzz
## Build a non-debug library with performance
## monitoring enabled. Linux only
perf_tests: clean
@echo "make perf_tests"
$(CC) $(CFLAGS) $(C_SRCS) $(GDB_FLAGS) $(PERF_FLAGS) tests/tests.c -o $(BUILD_DIR)/tests_gprof
$(CC) $(CFLAGS) $(C_SRCS) $(GDB_FLAGS) $(PERF_FLAGS) tests/big_tests.c -o $(BUILD_DIR)/big_tests_gprof
$(BUILD_DIR)/tests_gprof
gprof -b $(BUILD_DIR)/tests_gprof gmon.out > tests_perf_analysis.txt
$(BUILD_DIR)/big_tests_gprof
gprof -b $(BUILD_DIR)/big_tests_gprof gmon.out > big_tests_perf_analysis.txt
## Runs a single test that prints CPU time
## compared to the same malloc/free operations
malloc_cmp_test: clean
@echo "make malloc_cmp_test"
$(CC) $(CFLAGS) $(C_SRCS) $(OPTIMIZE) $(EXE_CFLAGS) $(OS_FLAGS) tests/tests.c -o $(BUILD_DIR)/tests
$(CC) $(CFLAGS) $(OPTIMIZE) $(EXE_CFLAGS) $(OS_FLAGS) -DMALLOC_PERF_TEST $(ISO_ALLOC_PRINTF_SRC) tests/tests.c -o $(BUILD_DIR)/malloc_tests
echo "Running IsoAlloc Performance Test"
build/tests
echo "Running glibc malloc Performance Test"
build/malloc_tests
## C++ Support - Build a debug version of the unit test
cpp_tests: clean cpp_library_debug
@echo "make cpp_tests"
$(CXX) $(CXXFLAGS) $(DEBUG_LOG_FLAGS) $(EXE_CFLAGS) tests/tests.cpp $(ISO_ALLOC_PRINTF_SRC) -o $(BUILD_DIR)/cxx_tests $(LDFLAGS)
LD_LIBRARY_PATH=$(BUILD_DIR)/ LD_PRELOAD=$(BUILD_DIR)/libisoalloc.so $(BUILD_DIR)/cxx_tests
install:
cp -pR build/libisoalloc.so /usr/lib/
format:
clang-format $(SRC_DIR)/*.* tests/*.* include/*.h -i
clean:
rm -rf build/* tests_perf_analysis.txt big_tests_perf_analysis.txt gmon.out test_output.txt *.dSYM core* profiler.data
mkdir -p build/
<|start_filename|>include/iso_alloc_sanity.h<|end_filename|>
/* iso_alloc_sanity.h - A secure memory allocator
* Copyright 2021 - <EMAIL> */
#pragma once
#if CPP_SUPPORT
#define _GNU_SOURCE
#endif
#if !__aarch64__ && !__x86_64__
#pragma message "IsoAlloc is untested and unsupported on 32 bit platforms"
#endif
#if UNINIT_READ_SANITY
#include <fcntl.h>
#include <linux/userfaultfd.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/syscall.h>
#endif
#define SANITY_SAMPLE_ODDS 10000
#define MAX_SANE_SAMPLES 1024
#define SANE_CACHE_SIZE 65536
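/* SANE_CACHE_IDX maps a chunk address to a cache slot using bits 8..23
 * of the pointer value */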
#define SANE_CACHE_IDX(p) (((uint64_t) p >> 8) & 0xffff)
#define SANITY_CANARY_VALIDATE_MASK 0xffffffffffffff00
#define SANITY_CANARY_SIZE 8
#if THREAD_SUPPORT
extern atomic_flag sane_cache_flag;
#define LOCK_SANITY_CACHE() \
do { \
} while(atomic_flag_test_and_set(&sane_cache_flag));
#define UNLOCK_SANITY_CACHE() \
atomic_flag_clear(&sane_cache_flag);
#else
#define LOCK_SANITY_CACHE()
#define UNLOCK_SANITY_CACHE()
#endif
#if UNINIT_READ_SANITY
extern pthread_t _page_fault_thread;
extern struct uffdio_api _uffd_api;
extern int64_t _uf_fd;
#endif
extern int32_t _sane_sampled;
extern uint8_t _sane_cache[SANE_CACHE_SIZE];
typedef struct {
void *guard_below;
void *guard_above;
void *address;
size_t orig_size;
bool right_aligned;
} _sane_allocation_t;
extern _sane_allocation_t _sane_allocations[MAX_SANE_SAMPLES];
extern uint64_t _sanity_canary;
#if UNINIT_READ_SANITY
INTERNAL_HIDDEN void _iso_alloc_setup_userfaultfd();
INTERNAL_HIDDEN void *_page_fault_thread_handler(void *uf_fd);
#endif
INTERNAL_HIDDEN INLINE void write_sanity_canary(void *p);
INTERNAL_HIDDEN INLINE void check_sanity_canary(_sane_allocation_t *sane_alloc);
INTERNAL_HIDDEN void *_iso_alloc_sample(size_t size);
INTERNAL_HIDDEN int32_t _iso_alloc_free_sane_sample(void *p);
INTERNAL_HIDDEN int32_t _remove_from_sane_trace(void *p);
INTERNAL_HIDDEN _sane_allocation_t *_get_sane_alloc(void *p);
<|start_filename|>src/iso_alloc_sanity.c<|end_filename|>
/* iso_alloc_sanity.c - A secure memory allocator
* Copyright 2021 - <EMAIL> */
#include "iso_alloc_internal.h"
#if ALLOC_SANITY
atomic_flag sane_cache_flag;
uint64_t _sanity_canary;
int32_t _sane_sampled;
uint8_t _sane_cache[SANE_CACHE_SIZE];
_sane_allocation_t _sane_allocations[MAX_SANE_SAMPLES];
#if UNINIT_READ_SANITY
pthread_t _page_fault_thread;
struct uffdio_api _uffd_api;
int64_t _uf_fd;
INTERNAL_HIDDEN void _iso_alloc_setup_userfaultfd() {
_uf_fd = syscall(__NR_userfaultfd, O_CLOEXEC | O_NONBLOCK);
if(_uf_fd == ERR) {
LOG_AND_ABORT("This kernel does not support userfaultfd");
}
_uffd_api.api = UFFD_API;
_uffd_api.features = 0;
if(ioctl(_uf_fd, UFFDIO_API, &_uffd_api) == ERR) {
LOG_AND_ABORT("Failed to setup userfaultfd with ioctl");
}
if(_page_fault_thread == 0) {
int32_t s = pthread_create(&_page_fault_thread, NULL, _page_fault_thread_handler, NULL);
if(s != OK) {
LOG_AND_ABORT("Cannot create userfaultfd handler thread");
}
}
}
INTERNAL_HIDDEN void *_page_fault_thread_handler(void *unused) {
static struct uffd_msg umsg;
ssize_t n;
while(true) {
struct pollfd pollfd;
int32_t ret;
pollfd.fd = _uf_fd;
pollfd.events = POLLIN;
ret = poll(&pollfd, 1, -1);
if(ret == ERR) {
LOG_AND_ABORT("Failed to poll userfaultfd file descriptor");
}
n = read(_uf_fd, &umsg, sizeof(struct uffd_msg));
if(n == OK) {
LOG_AND_ABORT("Got EOF on userfaultfd file descriptor")
}
if(n == ERR) {
LOG_AND_ABORT("Failed to read from userfaultfd file descriptor")
}
if(umsg.event != UFFD_EVENT_PAGEFAULT) {
LOG_AND_ABORT("Received non-page-fault event from userfaultfd")
}
LOCK_SANITY_CACHE();
_sane_allocation_t *sane_alloc = _get_sane_alloc((void *) umsg.arg.pagefault.address);
/* This is where we detect uninitialized reads. Whenever we
 * receive a page fault we check whether it was a read or a
 * write operation. If it was a write then we unregister the
 * page from userfaultfd, but if it was a read then we assume
 * this chunk was never initialized. It is possible we will
 * receive a read event while we are unregistering a page
 * that was previously written to */
if((umsg.arg.pagefault.flags & UFFD_PAGEFAULT_FLAG_WRITE) == 1) {
/* Unregister this page but don't remove it from our cache
* of tracked pages, we still need to unmap it at some point */
struct uffdio_register reg = {0};
if(sane_alloc != NULL) {
reg.range.start = (uint64_t) sane_alloc->address;
reg.range.len = g_page_size;
} else {
/* We received a page fault for an address we are no
* longer tracking. We don't know why but it's a write
* and we don't care about writes */
reg.range.start = umsg.arg.pagefault.address;
reg.range.len = g_page_size;
}
if((ioctl(_uf_fd, UFFDIO_UNREGISTER, &reg.range)) == ERR) {
LOG_AND_ABORT("Failed to unregister address %p", umsg.arg.pagefault.address);
}
UNLOCK_SANITY_CACHE();
continue;
}
/* Detects a read of an uninitialized page */
if((umsg.arg.pagefault.flags & UFFD_PAGEFAULT_FLAG_WRITE) == 0) {
LOG_AND_ABORT("Uninitialized read detected on page %p", umsg.arg.pagefault.address);
}
UNLOCK_SANITY_CACHE();
}
UNLOCK_SANITY_CACHE();
return NULL;
}
#endif /* UNINIT_READ_SANITY */
INTERNAL_HIDDEN INLINE void write_sanity_canary(void *p) {
uint64_t canary = (_sanity_canary & SANITY_CANARY_VALIDATE_MASK);
for(int32_t i = 0; i < (g_page_size / sizeof(uint64_t)); i++) {
memcpy(p, &canary, SANITY_CANARY_SIZE);
p += sizeof(uint64_t);
}
}
/* Verify the canary value in an allocation */
INTERNAL_HIDDEN INLINE void check_sanity_canary(_sane_allocation_t *sane_alloc) {
void *end = NULL;
void *start = NULL;
if(sane_alloc->right_aligned == true) {
end = ((sane_alloc->address + g_page_size) - sane_alloc->orig_size);
start = sane_alloc->address;
} else {
end = sane_alloc->address + g_page_size;
start = sane_alloc->address + sane_alloc->orig_size;
}
while(start < end) {
uint64_t v = *((uint64_t *) start);
uint64_t canary = (_sanity_canary & SANITY_CANARY_VALIDATE_MASK);
if(UNLIKELY(v != canary)) {
LOG_AND_ABORT("Sanity canary at 0x%p has been corrupted! Value: 0x%x Expected: 0x%x", start, v, canary);
}
start += sizeof(uint64_t);
}
}
/* Callers of this function should hold the sanity cache lock */
INTERNAL_HIDDEN _sane_allocation_t *_get_sane_alloc(void *p) {
if(_sane_cache[SANE_CACHE_IDX(p)] == 0) {
return NULL;
}
void *pa = NULL;
if(IS_PAGE_ALIGNED((uintptr_t) p)) {
pa = (void *) ROUND_DOWN_PAGE((uintptr_t) p);
} else {
pa = p;
}
for(uint32_t i = 0; i < MAX_SANE_SAMPLES; i++) {
if(_sane_allocations[i].address == pa) {
return &_sane_allocations[i];
}
}
return NULL;
}
INTERNAL_HIDDEN int32_t _iso_alloc_free_sane_sample(void *p) {
LOCK_SANITY_CACHE();
_sane_allocation_t *sane_alloc = _get_sane_alloc(p);
if(sane_alloc != NULL) {
check_sanity_canary(sane_alloc);
munmap(sane_alloc->guard_below, g_page_size);
munmap(sane_alloc->guard_above, g_page_size);
munmap(sane_alloc->address, g_page_size);
memset(sane_alloc, 0x0, sizeof(_sane_allocation_t));
_sane_cache[SANE_CACHE_IDX(p)]--;
_sane_sampled--;
UNLOCK_SANITY_CACHE();
return OK;
}
UNLOCK_SANITY_CACHE();
return ERR;
}
INTERNAL_HIDDEN void *_iso_alloc_sample(size_t size) {
#if UNINIT_READ_SANITY
if(_page_fault_thread == 0 || LIKELY((rand_uint64() % SANITY_SAMPLE_ODDS) != 1)) {
#else
if(LIKELY((rand_uint64() % SANITY_SAMPLE_ODDS) != 1)) {
#endif
return NULL;
}
_sane_allocation_t *sane_alloc = NULL;
LOCK_SANITY_CACHE();
/* Find the first free slot in our sampled storage */
for(uint32_t i = 0; i < MAX_SANE_SAMPLES; i++) {
if(_sane_allocations[i].address == 0) {
sane_alloc = &_sane_allocations[i];
break;
}
}
/* There are no available slots in the cache */
if(sane_alloc == NULL) {
LOG_AND_ABORT("There are no free slots in the cache, there should be %d", _sane_sampled);
}
sane_alloc->orig_size = size;
void *p = mmap_rw_pages(g_page_size * 3, false, SAMPLED_ALLOC_NAME);
if(p == NULL) {
LOG_AND_ABORT("Cannot allocate pages for sampled allocation");
}
sane_alloc->guard_below = p;
create_guard_page(sane_alloc->guard_below);
sane_alloc->guard_above = (void *) ROUND_UP_PAGE((uintptr_t) (p + (g_page_size * 2)));
create_guard_page(sane_alloc->guard_above);
p = (p + g_page_size);
/* We may right align the mapping to catch overflows */
if((rand_uint64() % 2) == 1) {
p = (p + g_page_size) - sane_alloc->orig_size;
sane_alloc->right_aligned = true;
sane_alloc->address = (void *) ROUND_DOWN_PAGE((uintptr_t) p);
} else {
sane_alloc->address = p;
}
#if UNINIT_READ_SANITY
struct uffdio_register reg = {0};
reg.range.start = (uint64_t) ROUND_DOWN_PAGE(p);
reg.range.len = g_page_size;
reg.mode = UFFDIO_REGISTER_MODE_MISSING;
#endif
_sane_cache[SANE_CACHE_IDX(p)]++;
_sane_sampled++;
#if UNINIT_READ_SANITY
if((ioctl(_uf_fd, UFFDIO_REGISTER, &reg)) == ERR) {
LOG_AND_ABORT("Failed to register address %p", p);
}
#endif
#if !UNINIT_READ_SANITY
write_sanity_canary(sane_alloc->address);
#endif
UNLOCK_SANITY_CACHE();
return p;
}
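/* Layout of a sampled allocation as built above (3 pages):
 *
 *   [ guard page | user page | guard page ]
 *
 * When the chunk is left aligned it starts at the user page base
 * and the canary fills the slack up to the page end; when it is
 * right aligned it ends at the upper page boundary (to catch
 * overflows) and the canary fills the space below it.
 * check_sanity_canary() verifies whichever region is unused */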
#endif
<|start_filename|>src/iso_alloc_printf.c<|end_filename|>
/* iso_alloc_printf.c - A secure memory allocator
* Copyright 2021 - <EMAIL> */
#include <stdarg.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#define INTERNAL_HIDDEN __attribute__((visibility("hidden")))
/* This primitive printf implementation is only ever called
 * from the LOG and LOG_AND_ABORT macros. We need to be able
 * to print basic log messages without invoking malloc() or
 * we run the risk of using a corrupted heap */
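/* Only a small set of conversions is implemented: %x and %p (hex),
 * %d and %u (32 bit decimal), %ld and %lu (64 bit decimal), and %s.
 * Unknown specifiers are silently dropped */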
static int8_t fmt_buf[64];
static const int8_t asc_hex[] = "0123456789abcdef";
INTERNAL_HIDDEN int8_t *_fmt(uint64_t n, uint32_t base) {
int8_t *ptr;
uint32_t count = 0;
memset(fmt_buf, 0x0, sizeof(fmt_buf));
ptr = &fmt_buf[63];
while(n != 0) {
*--ptr = asc_hex[n % base];
n /= base;
count++;
};
if(count == 0) {
ptr = (int8_t *) "0";
}
return ptr;
}
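/* Example (illustrative): _fmt(48879, 16) renders "beef" and
 * _fmt(0, 10) renders "0". The backing buffer is static so the
 * result is only valid until the next call */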
INTERNAL_HIDDEN void _iso_alloc_printf(int32_t fd, const char *f, ...) {
if(f == NULL) {
return;
}
int64_t i;
int32_t j;
char *s;
va_list arg;
va_start(arg, f);
char out[65535];
char *p = out;
memset(out, 0x0, sizeof(out));
for(const char *idx = f; *idx != '\0'; idx++) {
if(p >= (char *) (out + sizeof(out))) {
break;
}
while(*idx != '%' && *idx != '\0') {
*p = *idx;
p++;
if(*idx == '\n') {
break;
}
idx++;
}
/* Don't step past the end of the format string if
 * the copy loop above stopped on the NUL terminator */
if(*idx == '\0') {
break;
}
idx++;
if(*idx == '\0') {
break;
}
if(*idx == 'x' || *idx == 'p') {
i = va_arg(arg, int64_t);
s = (char *) _fmt(i, 16);
strncpy(p, s, strlen(s));
p += strlen(s);
} else if(*idx == 'd' || *idx == 'u') {
j = va_arg(arg, int32_t);
if(0 > j) {
j = -j;
*p = '-';
p++;
}
s = (char *) _fmt(j, 10);
strncpy(p, s, strlen(s));
p += strlen(s);
} else if(*idx == 'l') {
if(*(idx + 1) == 'd' || *(idx + 1) == 'u') {
idx++;
}
i = va_arg(arg, int64_t);
if(0 > i) {
i = -i;
*p = '-';
p++;
}
s = (char *) _fmt(i, 10);
strncpy(p, s, strlen(s));
p += strlen(s);
} else if(*idx == 's') {
s = va_arg(arg, char *);
if(s == NULL) {
break;
}
strncpy(p, s, strlen(s));
p += strlen(s);
}
}
(void) !write(fd, out, strlen(out));
va_end(arg);
}
<|start_filename|>android/jni/Application.mk<|end_filename|>
APP_ABI := arm64-v8a x86_64
APP_PLATFORM := android-23
<|start_filename|>src/iso_alloc.c<|end_filename|>
/* iso_alloc.c - A secure memory allocator
* Copyright 2021 - <EMAIL> */
#include "iso_alloc_internal.h"
#if THREAD_SUPPORT
atomic_flag root_busy_flag;
atomic_flag big_zone_busy_flag;
#if THREAD_CACHE
static __thread _tzc thread_zone_cache[THREAD_ZONE_CACHE_SZ];
static __thread size_t thread_zone_cache_count;
static __thread _tzcbs thread_bit_slot_cache;
#endif
#endif
uint32_t g_page_size;
uint32_t _default_zone_count;
iso_alloc_root *_root;
#if NO_ZERO_ALLOCATIONS
void *_zero_alloc_page;
#endif
/* Select a random number of chunks to be canaries. These
* can be verified anytime by calling check_canary()
* or check_canary_no_abort() */
INTERNAL_HIDDEN void create_canary_chunks(iso_alloc_zone *zone) {
#if ENABLE_ASAN || DISABLE_CANARY
return;
#else
/* Canary chunks are only for default zone sizes. This
* is because larger zones would waste a lot of memory
* if we set aside some of their chunks as canaries */
if(zone->chunk_size > MAX_DEFAULT_ZONE_SZ) {
return;
}
bitmap_index_t *bm = (bitmap_index_t *) zone->bitmap_start;
bitmap_index_t max_bitmap_idx = GET_MAX_BITMASK_INDEX(zone) - 1;
uint64_t chunk_count = GET_CHUNK_COUNT(zone);
bit_slot_t bit_slot;
/* Roughly 1% of the chunks in this zone will become a canary */
uint64_t canary_count = (chunk_count / CANARY_COUNT_DIV);
/* This function is only ever called during zone
 * initialization so we don't need to check the
 * current state of any chunks, they're all free.
 * It's possible the call to rand_uint64() here will
 * return the same index twice. We can live with
 * that collision because canary chunks only provide
 * a small security property anyway */
for(uint64_t i = 0; i < canary_count; i++) {
bitmap_index_t bm_idx = ALIGN_SZ_DOWN((rand_uint64() % (max_bitmap_idx)));
if(0 > bm_idx) {
bm_idx = 0;
}
/* Set the 1st and 2nd bits as 1 */
SET_BIT(bm[bm_idx], 0);
SET_BIT(bm[bm_idx], 1);
bit_slot = (bm_idx << BITS_PER_QWORD_SHIFT);
void *p = POINTER_FROM_BITSLOT(zone, bit_slot);
write_canary(zone, p);
}
#endif
}
#if ENABLE_ASAN
/* Verify the integrity of all canary chunks and the
* canary written to all free chunks. This function
* either aborts or returns nothing */
INTERNAL_HIDDEN void verify_all_zones(void) {
return;
}
INTERNAL_HIDDEN void verify_zone(iso_alloc_zone *zone) {
return;
}
INTERNAL_HIDDEN void _verify_all_zones(void) {
return;
}
INTERNAL_HIDDEN void _verify_zone(iso_alloc_zone *zone) {
return;
}
#else
/* Verify the integrity of all canary chunks and the
* canary written to all free chunks. This function
* either aborts or returns nothing */
INTERNAL_HIDDEN void verify_all_zones(void) {
LOCK_ROOT();
_verify_all_zones();
UNLOCK_ROOT();
}
INTERNAL_HIDDEN void verify_zone(iso_alloc_zone *zone) {
LOCK_ROOT();
_verify_zone(zone);
UNLOCK_ROOT();
}
INTERNAL_HIDDEN void _verify_all_zones(void) {
for(int32_t i = 0; i < _root->zones_used; i++) {
iso_alloc_zone *zone = &_root->zones[i];
if(zone->bitmap_start == NULL || zone->user_pages_start == NULL) {
break;
}
_verify_zone(zone);
}
/* No need to lock big zone here since the
* root should be locked by our caller */
iso_alloc_big_zone *big = _root->big_zone_head;
if(big != NULL) {
big = UNMASK_BIG_ZONE_NEXT(_root->big_zone_head);
}
while(big != NULL) {
check_big_canary(big);
if(big->next != NULL) {
big = UNMASK_BIG_ZONE_NEXT(big->next);
} else {
break;
}
}
}
INTERNAL_HIDDEN void _verify_zone(iso_alloc_zone *zone) {
UNMASK_ZONE_PTRS(zone);
bitmap_index_t *bm = (bitmap_index_t *) zone->bitmap_start;
bitmap_index_t max_bm_idx = GET_MAX_BITMASK_INDEX(zone);
bit_slot_t bit_slot;
int64_t bit;
for(bitmap_index_t i = 0; i < max_bm_idx; i++) {
for(int64_t j = 1; j < BITS_PER_QWORD; j += BITS_PER_CHUNK) {
bit = GET_BIT(bm[i], j);
/* If this bit is set it is either a free chunk or
* a canary chunk. Either way it should have a set
* of canaries we can verify */
if(bit == 1) {
bit_slot = (i << BITS_PER_QWORD_SHIFT) + j;
void *p = POINTER_FROM_BITSLOT(zone, bit_slot);
check_canary(zone, p);
}
}
}
MASK_ZONE_PTRS(zone);
}
#endif
/* Pick a random index in the bitmap and start looking
 * for free bit slots we can add to the cache. The random
 * bitmap index is to protect against biasing the free
 * slot cache with only chunks towards the start of the
 * user mapping. There's no guarantee this function will
 * find any free slots. */
INTERNAL_HIDDEN INLINE void fill_free_bit_slot_cache(iso_alloc_zone *zone) {
bitmap_index_t *bm = (bitmap_index_t *) zone->bitmap_start;
bitmap_index_t max_bitmap_idx = GET_MAX_BITMASK_INDEX(zone);
bit_slot_t bit_slot;
/* This gives us an arbitrary spot in the bitmap to
* start searching but may mean we end up with a smaller
* cache. This may negatively affect performance but
* leads to a less predictable free list */
bitmap_index_t bm_idx = 0;
if(max_bitmap_idx > ALIGNMENT) {
bm_idx = (rand_uint64() % (max_bitmap_idx - 1));
}
memset(zone->free_bit_slot_cache, BAD_BIT_SLOT, sizeof(zone->free_bit_slot_cache));
zone->free_bit_slot_cache_usable = 0;
uint8_t free_bit_slot_cache_index;
for(free_bit_slot_cache_index = 0; free_bit_slot_cache_index < BIT_SLOT_CACHE_SZ; bm_idx++) {
/* Don't index outside of the bitmap or
* we will return inaccurate bit slots */
if(bm_idx >= max_bitmap_idx) {
zone->free_bit_slot_cache_index = free_bit_slot_cache_index;
return;
}
for(int64_t j = 0; j < BITS_PER_QWORD; j += BITS_PER_CHUNK) {
if(free_bit_slot_cache_index >= BIT_SLOT_CACHE_SZ) {
zone->free_bit_slot_cache_index = free_bit_slot_cache_index;
return;
}
if((GET_BIT(bm[bm_idx], j)) == 0) {
bit_slot = (bm_idx << BITS_PER_QWORD_SHIFT) + j;
zone->free_bit_slot_cache[free_bit_slot_cache_index] = bit_slot;
free_bit_slot_cache_index++;
}
}
}
#if SHUFFLE_BIT_SLOT_CACHE
/* Shuffle the free bit slot cache */
if(free_bit_slot_cache_index > 1) {
for(uint8_t i = free_bit_slot_cache_index - 1; i > 0; i--) {
uint8_t j = (uint8_t) (rand_uint64() % (i + 1));
bit_slot_t t = zone->free_bit_slot_cache[j];
zone->free_bit_slot_cache[j] = zone->free_bit_slot_cache[i];
zone->free_bit_slot_cache[i] = t;
}
}
#endif
zone->free_bit_slot_cache_index = free_bit_slot_cache_index;
}
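/* A bit_slot encodes a bitmap position as
 * (qword index << BITS_PER_QWORD_SHIFT) + bit offset. With
 * BITS_PER_CHUNK (2) bits of state per chunk a single bitmap
 * qword tracks 32 chunks, and bit_slot / BITS_PER_CHUNK
 * recovers the chunk number within the zone */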
INTERNAL_HIDDEN INLINE void insert_free_bit_slot(iso_alloc_zone *zone, int64_t bit_slot) {
#if VERIFY_BIT_SLOT_CACHE
/* The cache is sorted at creation time but once we start
 * free'ing chunks we add bit_slots to it in an unpredictable
 * order. So we can't search the cache with something like
 * a binary search. This brute force search shouldn't incur
 * too much of a performance penalty as we only search starting
 * at the free_bit_slot_cache_usable index which is updated
 * every time we call get_next_free_bit_slot(). We do this in
 * order to detect any corruption of the cache that attempts
 * to add duplicate bit_slots which would result in iso_alloc()
 * handing out in-use chunks. The _iso_alloc() path also does
 * a check on the bitmap itself before handing out any chunks */
int32_t max_cache_slots = sizeof(zone->free_bit_slot_cache) >> 3;
for(int32_t i = zone->free_bit_slot_cache_usable; i < max_cache_slots; i++) {
if(zone->free_bit_slot_cache[i] == bit_slot) {
LOG_AND_ABORT("Zone[%d] already contains bit slot %lu in cache", zone->index, bit_slot);
}
}
#endif
if(zone->free_bit_slot_cache_index >= BIT_SLOT_CACHE_SZ) {
return;
}
zone->free_bit_slot_cache[zone->free_bit_slot_cache_index] = bit_slot;
zone->free_bit_slot_cache_index++;
}
INTERNAL_HIDDEN bit_slot_t get_next_free_bit_slot(iso_alloc_zone *zone) {
if(zone->free_bit_slot_cache_usable >= BIT_SLOT_CACHE_SZ ||
zone->free_bit_slot_cache_usable > zone->free_bit_slot_cache_index) {
return BAD_BIT_SLOT;
}
zone->next_free_bit_slot = zone->free_bit_slot_cache[zone->free_bit_slot_cache_usable];
zone->free_bit_slot_cache[zone->free_bit_slot_cache_usable++] = BAD_BIT_SLOT;
return zone->next_free_bit_slot;
}
INTERNAL_HIDDEN INLINE void iso_clear_user_chunk(uint8_t *p, size_t size) {
memset(p, POISON_BYTE, size);
}
INTERNAL_HIDDEN void *create_guard_page(void *p) {
if(p == NULL) {
p = mmap_rw_pages(g_page_size, false, GUARD_PAGE_NAME);
if(p == NULL) {
LOG_AND_ABORT("Could not allocate guard page");
}
}
/* Use g_page_size here because we could be
 * calling this while we set up the root */
mprotect_pages(p, g_page_size, PROT_NONE);
madvise(p, g_page_size, MADV_DONTNEED);
return p;
}
INTERNAL_HIDDEN void *mmap_rw_pages(size_t size, bool populate, const char *name) {
return mmap_pages(size, populate, name, PROT_READ | PROT_WRITE);
}
INTERNAL_HIDDEN void *mmap_pages(size_t size, bool populate, const char *name, int32_t prot) {
#if !ENABLE_ASAN
/* Produce a random page address as a hint for mmap */
uint64_t hint = ROUND_DOWN_PAGE(rand_uint64());
hint &= 0x3FFFFFFFF000;
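/* The mask above keeps the hint aligned to a 4k page (low
 * 12 bits clear) and below 2^46, a range the kernel can
 * typically satisfy on 64 bit platforms */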
void *p = (void *) hint;
#else
void *p = NULL;
#endif
size = ROUND_UP_PAGE(size);
/* Only Linux supports MAP_POPULATE */
#if __linux__ && PRE_POPULATE_PAGES
if(populate == true) {
p = mmap(p, size, prot, MAP_PRIVATE | MAP_ANONYMOUS | MAP_POPULATE, -1, 0);
} else {
p = mmap(p, size, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
}
#else
p = mmap(p, size, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
#endif
if(p == MAP_FAILED) {
LOG_AND_ABORT("Failed to mmap rw pages");
return NULL;
}
if(name != NULL) {
name_mapping(p, size, name);
}
return p;
}
INTERNAL_HIDDEN void mprotect_pages(void *p, size_t size, int32_t protection) {
size = ROUND_UP_PAGE(size);
if((mprotect(p, size, protection)) == ERR) {
LOG_AND_ABORT("Failed to mprotect pages @ 0x%p", p);
}
}
INTERNAL_HIDDEN iso_alloc_root *iso_alloc_new_root(void) {
void *p = NULL;
iso_alloc_root *r;
size_t _root_size = sizeof(iso_alloc_root) + (g_page_size << 1);
p = (void *) mmap_rw_pages(_root_size, true, ROOT_NAME);
if(p == NULL) {
LOG_AND_ABORT("Cannot allocate pages for root");
}
r = (iso_alloc_root *) (p + g_page_size);
r->system_page_size = g_page_size;
r->guard_below = p;
create_guard_page(r->guard_below);
r->guard_above = (void *) ROUND_UP_PAGE((uintptr_t) (p + sizeof(iso_alloc_root) + r->system_page_size));
create_guard_page(r->guard_above);
return r;
}
INTERNAL_HIDDEN void iso_alloc_initialize_global_root(void) {
/* Do not allow a reinitialization unless root is NULL */
if(_root != NULL) {
return;
}
_root = iso_alloc_new_root();
if(_root == NULL) {
LOG_AND_ABORT("Could not initialize global root");
}
_default_zone_count = sizeof(default_zones) >> 3;
_root->zones_size = (MAX_ZONES * sizeof(iso_alloc_zone));
_root->zones_size += (g_page_size * 2);
_root->zones_size = ROUND_UP_PAGE(_root->zones_size);
/* Allocate memory with guard pages to hold zone data */
void *p = mmap_rw_pages(_root->zones_size, false, NULL);
create_guard_page(p);
create_guard_page((void *) (uintptr_t) (p + _root->zones_size) - g_page_size);
_root->zones = (void *) (p + g_page_size);
name_mapping(p, _root->zones_size, "isoalloc zone metadata");
for(int64_t i = 0; i < _default_zone_count; i++) {
if((_iso_new_zone(default_zones[i], true)) == NULL) {
LOG_AND_ABORT("Failed to create a new zone");
}
}
/* This call to mlock may fail if memory limits
* are set too low. This will not affect us
* at runtime. It just means some of the default
* root meta data may get swapped to disk */
mlock(_root, sizeof(iso_alloc_root));
_root->zone_handle_mask = rand_uint64();
_root->big_zone_next_mask = rand_uint64();
_root->big_zone_canary_secret = rand_uint64();
}
__attribute__((constructor(FIRST_CTOR))) void iso_alloc_ctor(void) {
g_page_size = sysconf(_SC_PAGESIZE);
iso_alloc_initialize_global_root();
#if HEAP_PROFILER
_initialize_profiler();
#endif
#if NO_ZERO_ALLOCATIONS
_zero_alloc_page = mmap_pages(g_page_size, false, NULL, PROT_NONE);
#endif
#if ALLOC_SANITY && UNINIT_READ_SANITY
_iso_alloc_setup_userfaultfd();
#endif
#if ALLOC_SANITY
_sanity_canary = rand_uint64();
#endif
}
INTERNAL_HIDDEN INLINE void flush_thread_zone_cache() {
#if THREAD_SUPPORT && THREAD_CACHE
/* The thread zone cache needs to be invalidated */
memset(thread_zone_cache, 0x0, sizeof(thread_zone_cache));
thread_zone_cache_count = 0;
if(thread_bit_slot_cache.chunk != NULL) {
iso_alloc_zone *zone = iso_find_zone_range(thread_bit_slot_cache.chunk);
if(UNLIKELY(zone == NULL)) {
LOG_AND_ABORT("Cached thread pointer %p has been corrupted", thread_bit_slot_cache.chunk);
}
UNMASK_ZONE_PTRS(zone);
iso_free_chunk_from_zone(zone, thread_bit_slot_cache.chunk, false);
MASK_ZONE_PTRS(zone);
thread_bit_slot_cache.chunk = NULL;
thread_bit_slot_cache.chunk_size = 0;
}
#endif
}
INTERNAL_HIDDEN void _unmap_zone(iso_alloc_zone *zone) {
munmap(zone->bitmap_start, zone->bitmap_size);
munmap(zone->bitmap_start - _root->system_page_size, _root->system_page_size);
munmap(zone->bitmap_start + zone->bitmap_size, _root->system_page_size);
munmap(zone->user_pages_start, ZONE_USER_SIZE);
munmap(zone->user_pages_start - _root->system_page_size, _root->system_page_size);
munmap(zone->user_pages_start + ZONE_USER_SIZE, _root->system_page_size);
}
INTERNAL_HIDDEN void _iso_alloc_destroy_zone(iso_alloc_zone *zone) {
LOCK_ROOT();
UNMASK_ZONE_PTRS(zone);
UNPOISON_ZONE(zone);
if(zone->internally_managed == false) {
#if NEVER_REUSE_ZONES || FUZZ_MODE
_unmap_zone(zone);
zone->user_pages_start = NULL;
zone->bitmap_start = NULL;
/* Mark the zone as full so no attempts are made to use it */
zone->is_full = true;
flush_thread_zone_cache();
#else
/* This zone can be used again, we just need to wipe
* any sensitive data from it and prime it for use */
memset(zone->bitmap_start, 0x0, zone->bitmap_size);
memset(zone->user_pages_start, 0x0, ZONE_USER_SIZE);
/* Take over the zone to be used internally */
zone->internally_managed = true;
zone->is_full = false;
/* Reusing custom zones has the potential for introducing
* zone-use-after-free patterns. So we bootstrap the zone
* from scratch here */
create_canary_chunks(zone);
fill_free_bit_slot_cache(zone);
/* Prime the next_free_bit_slot member */
get_next_free_bit_slot(zone);
MASK_ZONE_PTRS(zone);
#endif
/* If we are destroying the zone let's give the memory
 * back to the OS. It will still be available if we
 * try to use it */
madvise(zone->bitmap_start, zone->bitmap_size, MADV_DONTNEED);
madvise(zone->user_pages_start, ZONE_USER_SIZE, MADV_DONTNEED);
POISON_ZONE(zone);
UNLOCK_ROOT();
} else {
/* The only time we ever destroy a default non-custom zone
 * is from the destructor so it's safe to unmap its pages */
_unmap_zone(zone);
flush_thread_zone_cache();
UNLOCK_ROOT();
}
}
__attribute__((destructor(LAST_DTOR))) void iso_alloc_dtor(void) {
LOCK_ROOT();
flush_thread_zone_cache();
#if HEAP_PROFILER
_iso_output_profile();
#endif
#if NO_ZERO_ALLOCATIONS
munmap(_zero_alloc_page, g_page_size);
#endif
#if DEBUG && (LEAK_DETECTOR || MEM_USAGE)
uint64_t mb = 0;
for(uint32_t i = 0; i < _root->zones_used; i++) {
iso_alloc_zone *zone = &_root->zones[i];
_iso_alloc_zone_leak_detector(zone, false);
}
mb = __iso_alloc_mem_usage();
#if MEM_USAGE
LOG("Total megabytes consumed by all zones: %lu", mb);
_iso_alloc_print_stats();
#endif
#endif
for(uint32_t i = 0; i < _root->zones_used; i++) {
iso_alloc_zone *zone = &_root->zones[i];
_verify_zone(zone);
#if ISO_DTOR_CLEANUP
_iso_alloc_destroy_zone(zone);
#endif
}
#if ISO_DTOR_CLEANUP
/* Unmap all zone structures */
munmap((void *) ((uintptr_t) _root->zones - g_page_size), _root->zones_size);
#endif
iso_alloc_big_zone *big_zone = _root->big_zone_head;
iso_alloc_big_zone *big = NULL;
if(big_zone != NULL) {
big_zone = UNMASK_BIG_ZONE_NEXT(_root->big_zone_head);
}
while(big_zone != NULL) {
check_big_canary(big_zone);
if(big_zone->next != NULL) {
big = UNMASK_BIG_ZONE_NEXT(big_zone->next);
} else {
big = NULL;
}
#if ISO_DTOR_CLEANUP
/* Free the user pages first */
void *up = big_zone->user_pages_start - _root->system_page_size;
munmap(up, (_root->system_page_size << 1) + big_zone->size);
/* Free the meta data. It lives at a random offset within
 * its page so round down to the page base first, then back
 * up one page to include the guard page below it */
munmap((void *) (ROUND_DOWN_PAGE((uintptr_t) big_zone) - _root->system_page_size), (_root->system_page_size * BIG_ZONE_META_DATA_PAGE_COUNT));
#endif
big_zone = big;
}
#if ISO_DTOR_CLEANUP
munmap(_root->guard_below, _root->system_page_size);
munmap(_root->guard_above, _root->system_page_size);
munmap(_root, sizeof(iso_alloc_root));
#endif
UNLOCK_ROOT();
}
INTERNAL_HIDDEN int32_t name_mapping(void *p, size_t sz, const char *name) {
#if NAMED_MAPPINGS && __ANDROID__
return prctl(PR_SET_VMA, PR_SET_VMA_ANON_NAME, p, sz, name);
#endif
return 0;
}
INTERNAL_HIDDEN iso_alloc_zone *iso_new_zone(size_t size, bool internal) {
LOCK_ROOT();
iso_alloc_zone *zone = _iso_new_zone(size, internal);
UNLOCK_ROOT();
return zone;
}
INTERNAL_HIDDEN iso_alloc_zone *_iso_new_zone(size_t size, bool internal) {
if(_root->zones_used >= MAX_ZONES) {
LOG_AND_ABORT("Cannot allocate additional zones");
}
if(size > SMALL_SZ_MAX) {
LOG("Request for new zone with %ld byte chunks should be handled by big alloc path", size);
return NULL;
}
/* Chunk size must be aligned */
if(IS_ALIGNED(size) != 0) {
size = ALIGN_SZ_UP(size);
}
/* Minimum chunk size */
if(size < SMALLEST_CHUNK_SZ) {
size = SMALLEST_CHUNK_SZ;
}
iso_alloc_zone *new_zone = &_root->zones[_root->zones_used];
new_zone->internally_managed = internal;
new_zone->is_full = false;
new_zone->chunk_size = size;
/* If a caller requests an allocation that is >=(ZONE_USER_SIZE/2)
* then we need to allocate a minimum size bitmap */
uint32_t bitmap_size = (GET_CHUNK_COUNT(new_zone) << BITS_PER_CHUNK_SHIFT) >> BITS_PER_BYTE_SHIFT;
new_zone->bitmap_size = (bitmap_size > sizeof(bitmap_index_t)) ? bitmap_size : sizeof(bitmap_index_t);
/* All of the following fields are immutable
* and should not change once they are set */
void *p = mmap_rw_pages(new_zone->bitmap_size + (_root->system_page_size << 1), true, ZONE_BITMAP_NAME);
void *bitmap_pages_guard_below = p;
new_zone->bitmap_start = (p + _root->system_page_size);
void *bitmap_pages_guard_above = (void *) ROUND_UP_PAGE((uintptr_t) p + (new_zone->bitmap_size + _root->system_page_size));
create_guard_page(bitmap_pages_guard_below);
create_guard_page(bitmap_pages_guard_above);
/* Bitmap pages are accessed often and usually in sequential order */
madvise(new_zone->bitmap_start, new_zone->bitmap_size, MADV_WILLNEED);
madvise(new_zone->bitmap_start, new_zone->bitmap_size, MADV_SEQUENTIAL);
char *name;
if(internal == true) {
name = INTERNAL_UZ_NAME;
} else {
name = CUSTOM_UZ_NAME;
}
/* All user pages use MAP_POPULATE. This might seem like we are asking
* the kernel to commit a lot of memory for us that we may never use
* but when we call create_canary_chunks() that will happen anyway */
p = mmap_rw_pages(ZONE_USER_SIZE + (_root->system_page_size << 1), true, name);
void *user_pages_guard_below = p;
new_zone->user_pages_start = (p + _root->system_page_size);
void *user_pages_guard_above = (void *) ROUND_UP_PAGE((uintptr_t) p + (ZONE_USER_SIZE + _root->system_page_size));
create_guard_page(user_pages_guard_below);
create_guard_page(user_pages_guard_above);
/* User pages will be accessed in an unpredictable order */
madvise(new_zone->user_pages_start, ZONE_USER_SIZE, MADV_WILLNEED);
madvise(new_zone->user_pages_start, ZONE_USER_SIZE, MADV_RANDOM);
new_zone->index = _root->zones_used;
new_zone->canary_secret = rand_uint64();
new_zone->pointer_mask = rand_uint64();
create_canary_chunks(new_zone);
/* When we create a new zone it's an opportunity to
 * populate our free list cache with random entries */
fill_free_bit_slot_cache(new_zone);
/* Prime the next_free_bit_slot member */
get_next_free_bit_slot(new_zone);
#if CPU_PIN
new_zone->cpu_core = sched_getcpu();
#endif
POISON_ZONE(new_zone);
MASK_ZONE_PTRS(new_zone);
_root->zones_used++;
return new_zone;
}
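/* The zone built above consists of two separate mappings,
 * each sandwiched between guard pages:
 *
 *   [ guard | bitmap     | guard ]
 *   [ guard | user pages | guard ]
 *
 * Keeping the bitmap out of line from the user pages means a
 * chunk overflow can't directly corrupt allocator meta data */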
/* Iterate through a zone bitmap a qword at a time
 * looking for empty holes (i.e. slot == 0) */
INTERNAL_HIDDEN bit_slot_t iso_scan_zone_free_slot(iso_alloc_zone *zone) {
bitmap_index_t *bm = (bitmap_index_t *) zone->bitmap_start;
bit_slot_t bit_slot = BAD_BIT_SLOT;
bitmap_index_t max_bm_idx = GET_MAX_BITMASK_INDEX(zone);
/* Iterate the entire bitmap a qword at a time */
for(bitmap_index_t i = 0; i < max_bm_idx; i++) {
/* If the qword is 0 then there are some free
 * slots we can use at this location */
if(bm[i] == 0x0) {
bit_slot = (i << BITS_PER_QWORD_SHIFT);
return bit_slot;
}
}
return bit_slot;
}
/* This function scans an entire bitmap bit-by-bit
 * and returns the first free bit position. In a heavily
 * used zone this search will be slow */
INTERNAL_HIDDEN bit_slot_t iso_scan_zone_free_slot_slow(iso_alloc_zone *zone) {
bitmap_index_t *bm = (bitmap_index_t *) zone->bitmap_start;
bit_slot_t bit_slot = BAD_BIT_SLOT;
bitmap_index_t max_bm_idx = GET_MAX_BITMASK_INDEX(zone);
int64_t bit;
for(bitmap_index_t i = 0; i < max_bm_idx; i++) {
for(int64_t j = 0; j < BITS_PER_QWORD; j += BITS_PER_CHUNK) {
bit = GET_BIT(bm[i], j);
if(bit == 0) {
bit_slot = (i << BITS_PER_QWORD_SHIFT) + j;
return bit_slot;
}
}
}
return bit_slot;
}
INTERNAL_HIDDEN iso_alloc_zone *is_zone_usable(iso_alloc_zone *zone, size_t size) {
/* This zone may fit this chunk but if the zone was
* created for chunks more than N* larger than the
* requested allocation size then we would be wasting
* a lot of memory by using it. We only do this for
* sizes beyond ZONE_1024 bytes. In other words we can
* live with some wasted space in zones that manage
* chunks smaller than ZONE_1024 */
if(zone->internally_managed == true && size > ZONE_1024 && zone->chunk_size >= (size << WASTED_SZ_MULTIPLIER_SHIFT)) {
return NULL;
}
if(zone->next_free_bit_slot != BAD_BIT_SLOT) {
return zone;
}
UNMASK_ZONE_PTRS(zone);
/* If the cache for this zone is empty we should
* refill it to make future allocations faster
* for all threads */
if(zone->free_bit_slot_cache_usable >= zone->free_bit_slot_cache_index) {
fill_free_bit_slot_cache(zone);
}
bit_slot_t bit_slot = get_next_free_bit_slot(zone);
if(LIKELY(bit_slot != BAD_BIT_SLOT)) {
MASK_ZONE_PTRS(zone);
return zone;
}
/* Free list failed, use a fast search */
bit_slot = iso_scan_zone_free_slot(zone);
if(UNLIKELY(bit_slot == BAD_BIT_SLOT)) {
/* Fast search failed, search bit by bit */
bit_slot = iso_scan_zone_free_slot_slow(zone);
MASK_ZONE_PTRS(zone);
/* This zone may be entirely full, try the next one
* but mark this zone full so future allocations can
* take a faster path */
if(bit_slot == BAD_BIT_SLOT) {
zone->is_full = true;
return NULL;
} else {
zone->next_free_bit_slot = bit_slot;
return zone;
}
} else {
zone->next_free_bit_slot = bit_slot;
MASK_ZONE_PTRS(zone);
return zone;
}
}
/* Implements the check for iso_find_zone_fit */
INTERNAL_HIDDEN bool iso_does_zone_fit(iso_alloc_zone *zone, size_t size) {
#if CPU_PIN
if(zone->cpu_core != sched_getcpu()) {
return false;
}
#endif
/* Don't return a zone that handles a size far larger
 * than we need. This could lead to high memory usage
 * depending on allocation patterns but helps enforce
 * spatial separation based on size */
if(zone->chunk_size >= ZONE_1024 && size <= ZONE_128) {
return false;
}
if(zone->chunk_size < size || zone->internally_managed == false || zone->is_full == true) {
return false;
}
/* We found a zone, lets try to find a free slot in it */
zone = is_zone_usable(zone, size);
if(zone == NULL) {
return false;
} else {
return true;
}
}
/* Finds a zone that can fit this allocation request */
INTERNAL_HIDDEN iso_alloc_zone *iso_find_zone_fit(size_t size) {
iso_alloc_zone *zone = NULL;
int32_t i = 0;
#if !SMALL_MEM_STARTUP
/* A simple optimization to find which default zone
* should fit this allocation. If we fail then a
* slower iterative approach is used. The longer a
* program runs the more likely we will fail this
* fast path as default zones may fill up */
if(size >= ZONE_512 && size <= ZONE_8192) {
i = _default_zone_count >> 1;
} else if(size > ZONE_8192) {
i = _default_zone_count;
}
#endif
for(; i < _root->zones_used; i++) {
zone = &_root->zones[i];
bool fits = iso_does_zone_fit(zone, size);
if(fits == true) {
return zone;
}
}
return NULL;
}
INTERNAL_HIDDEN void *_iso_calloc(size_t nmemb, size_t size) {
/* Abort if nmemb * size would overflow a size_t. The
 * division based check catches all wrap arounds */
if(size != 0 && nmemb > (SIZE_MAX / size)) {
LOG_AND_ABORT("Call to calloc() will overflow nmemb=%zu size=%zu", nmemb, size);
return NULL;
}
void *p = _iso_alloc(NULL, nmemb * size);
if(p == NULL) {
return NULL;
}
memset(p, 0x0, nmemb * size);
return p;
}
INTERNAL_HIDDEN void *_iso_big_alloc(size_t size) {
size_t new_size = ROUND_UP_PAGE(size);
if(new_size < size || new_size > BIG_SZ_MAX) {
LOG_AND_ABORT("Cannot allocate a big zone of %ld bytes", new_size);
}
size = new_size;
LOCK_BIG_ZONE();
/* Let's first see if there's an existing set of
 * pages that can satisfy this allocation request */
iso_alloc_big_zone *big = _root->big_zone_head;
if(big != NULL) {
big = UNMASK_BIG_ZONE_NEXT(_root->big_zone_head);
}
iso_alloc_big_zone *last_big = NULL;
while(big != NULL) {
check_big_canary(big);
if(big->free == true && big->size >= size) {
break;
}
last_big = big;
if(big->next != NULL) {
big = UNMASK_BIG_ZONE_NEXT(big->next);
} else {
big = NULL;
break;
}
}
/* We need to setup a new set of pages */
if(big == NULL) {
/* User data is allocated separately from big zone meta
* data to prevent an attacker from targeting it */
void *user_pages = mmap_rw_pages((_root->system_page_size << BIG_ZONE_USER_PAGE_COUNT_SHIFT) + size, false, BIG_ZONE_UD_NAME);
if(user_pages == NULL) {
UNLOCK_BIG_ZONE();
#if ABORT_ON_NULL
LOG_AND_ABORT("isoalloc configured to abort on NULL");
#endif
return NULL;
}
void *p = mmap_rw_pages((_root->system_page_size * BIG_ZONE_META_DATA_PAGE_COUNT), false, BIG_ZONE_MD_NAME);
/* The first page before meta data is a guard page */
create_guard_page(p);
/* The second page is for meta data and it is placed
* at a random offset from the start of the page */
big = (iso_alloc_big_zone *) (p + _root->system_page_size);
madvise(big, _root->system_page_size, MADV_WILLNEED);
uint32_t random_offset = ALIGN_SZ_DOWN(rand_uint64());
big = (iso_alloc_big_zone *) ((p + _root->system_page_size) + (random_offset % (_root->system_page_size - sizeof(iso_alloc_big_zone))));
big->free = false;
big->size = size;
big->next = NULL;
if(last_big != NULL) {
last_big->next = MASK_BIG_ZONE_NEXT(big);
}
if(_root->big_zone_head == NULL) {
_root->big_zone_head = MASK_BIG_ZONE_NEXT(big);
}
/* Create the guard page after the meta data */
void *next_gp = (p + (_root->system_page_size << 1));
create_guard_page(next_gp);
/* The first page is a guard page */
create_guard_page(user_pages);
/* Tell the kernel we want to access this big zone allocation */
user_pages += _root->system_page_size;
madvise(user_pages, size, MADV_WILLNEED);
madvise(user_pages, size, MADV_RANDOM);
/* The last page beyond user data is a guard page */
void *last_gp = (user_pages + size);
create_guard_page(last_gp);
/* Save a pointer to the user pages */
big->user_pages_start = user_pages;
/* These canaries prevent a linear overwrite of the big
 * zone meta data structure from either direction */
big->canary_a = ((uint64_t) big ^ bswap_64((uint64_t) big->user_pages_start) ^ _root->big_zone_canary_secret);
big->canary_b = big->canary_a;
UNLOCK_BIG_ZONE();
return big->user_pages_start;
} else {
check_big_canary(big);
big->free = false;
UNPOISON_BIG_ZONE(big);
UNLOCK_BIG_ZONE();
return big->user_pages_start;
}
}
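/* Each chunk is tracked by 2 bits of state in its zone bitmap.
 * Combining the logic below with iso_free_chunk_from_zone() and
 * create_canary_chunks() gives these states, where the first bit
 * is the in-use bit and the second is the canary/was-used bit:
 *
 *   00 - free, never allocated
 *   10 - currently in use
 *   01 - free'd, currently holds a canary
 *   11 - canary chunk (or a permanently free'd chunk)
 */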
INTERNAL_HIDDEN void *_iso_alloc_bitslot_from_zone(bit_slot_t bitslot, iso_alloc_zone *zone) {
bitmap_index_t dwords_to_bit_slot = (bitslot >> BITS_PER_QWORD_SHIFT);
int64_t which_bit = WHICH_BIT(bitslot);
void *p = POINTER_FROM_BITSLOT(zone, bitslot);
UNPOISON_ZONE_CHUNK(zone, p);
bitmap_index_t *bm = (bitmap_index_t *) zone->bitmap_start;
/* Read out 64 bits from the bitmap. We will write
* them back before we return. This reduces the
* number of times we have to hit the bitmap page
* which could result in a page fault */
bitmap_index_t b = bm[dwords_to_bit_slot];
if(UNLIKELY(p > zone->user_pages_start + ZONE_USER_SIZE)) {
LOG_AND_ABORT("Allocating an address 0x%p from zone[%d], bit slot %lu %ld bytes %ld pages outside zones user pages 0x%p 0x%p",
p, zone->index, bitslot, p - (zone->user_pages_start + ZONE_USER_SIZE), (p - (zone->user_pages_start + ZONE_USER_SIZE)) / _root->system_page_size,
zone->user_pages_start, zone->user_pages_start + ZONE_USER_SIZE);
}
if(UNLIKELY((GET_BIT(b, which_bit)) != 0)) {
LOG_AND_ABORT("Zone[%d] for chunk size %d cannot return allocated chunk at 0x%p bitmap location @ 0x%p. bit slot was %lu, bit number was %" PRIu64,
zone->index, zone->chunk_size, p, &bm[dwords_to_bit_slot], bitslot, which_bit);
}
/* This chunk was either previously allocated and free'd
 * or it's a canary chunk. In either case this means it
 * has a canary written in its first dword. Here we check
 * that canary and abort if it's been corrupted */
#if !ENABLE_ASAN && !DISABLE_CANARY
if((GET_BIT(b, (which_bit + 1))) == 1) {
check_canary(zone, p);
memset(p, 0x0, CANARY_SIZE);
}
#endif
/* Set the in-use bit */
SET_BIT(b, which_bit);
/* The second bit is flipped to 0 while in use. This
 * is because a previously in-use chunk would have
 * a bit pattern of 11 which makes it look the same
 * as a canary chunk. This bit is set again upon free */
UNSET_BIT(b, (which_bit + 1));
bm[dwords_to_bit_slot] = b;
return p;
}
INTERNAL_HIDDEN INLINE size_t next_pow2(size_t sz) {
sz |= sz >> 1;
sz |= sz >> 2;
sz |= sz >> 4;
sz |= sz >> 8;
sz |= sz >> 16;
sz |= sz >> 32;
return sz + 1;
}
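/* Examples (illustrative): next_pow2(31) == 32 and
 * next_pow2(33) == 64. An exact power of 2 is rounded up
 * to the next one, e.g. next_pow2(32) == 64 */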
INTERNAL_HIDDEN INLINE void populate_thread_caches(iso_alloc_zone *zone) {
#if THREAD_SUPPORT && THREAD_CACHE
if(thread_bit_slot_cache.chunk == NULL) {
bit_slot_t bit_slot = get_next_free_bit_slot(zone);
if(bit_slot != BAD_BIT_SLOT) {
/* We just stole the next free bit slot */
zone->next_free_bit_slot = BAD_BIT_SLOT;
thread_bit_slot_cache.chunk = _iso_alloc_bitslot_from_zone(bit_slot, zone);
thread_bit_slot_cache.chunk_size = zone->chunk_size;
}
}
if(thread_zone_cache_count < THREAD_ZONE_CACHE_SZ) {
thread_zone_cache[thread_zone_cache_count].zone = zone;
thread_zone_cache[thread_zone_cache_count].chunk_size = zone->chunk_size;
thread_zone_cache_count++;
} else {
thread_zone_cache_count = 0;
thread_zone_cache[thread_zone_cache_count].zone = zone;
thread_zone_cache[thread_zone_cache_count].chunk_size = zone->chunk_size;
}
#endif
}
INTERNAL_HIDDEN void *_iso_alloc(iso_alloc_zone *zone, size_t size) {
#if THREAD_SUPPORT && THREAD_CACHE
if(LIKELY(zone == NULL) && size <= SMALL_SZ_MAX && thread_bit_slot_cache.chunk_size >= size && thread_bit_slot_cache.chunk != NULL) {
void *p = thread_bit_slot_cache.chunk;
thread_bit_slot_cache.chunk = NULL;
thread_bit_slot_cache.chunk_size = 0;
return p;
}
#endif
LOCK_ROOT();
if(UNLIKELY(_root == NULL)) {
g_page_size = sysconf(_SC_PAGESIZE);
iso_alloc_initialize_global_root();
}
#if NO_ZERO_ALLOCATIONS
if(size == 0) {
UNLOCK_ROOT();
return _zero_alloc_page;
}
#endif
#if ALLOC_SANITY
/* We only sample allocations smaller than an individual
 * page. We are unlikely to find uninitialized reads in
 * larger sizes and it makes tracking them less complex */
size_t sampled_size = ALIGN_SZ_UP(size);
if(sampled_size < _root->system_page_size && _sane_sampled < MAX_SANE_SAMPLES) {
void *ps = _iso_alloc_sample(sampled_size);
if(ps != NULL) {
UNLOCK_ROOT();
return ps;
}
}
#endif
#if HEAP_PROFILER
_iso_alloc_profile();
#endif
/* Allocation requests of SMALL_SZ_MAX bytes or larger are
 * handled by the 'big allocation' path. If a zone was
 * passed in we abort because it's a misuse of the API */
if(LIKELY(size < SMALL_SZ_MAX)) {
#if FUZZ_MODE
_verify_all_zones();
#endif
#if THREAD_SUPPORT && THREAD_CACHE
if(LIKELY(zone == NULL)) {
/* Hot Path: Check the thread cache for a zone this
* thread recently used for an alloc/free operation.
* It's likely we are allocating a similar size chunk
* and this will speed up that operation */
for(int64_t i = 0; i < thread_zone_cache_count; i++) {
if(thread_zone_cache[i].chunk_size >= size) {
bool fit = iso_does_zone_fit(thread_zone_cache[i].zone, size);
if(fit == true) {
zone = thread_zone_cache[i].zone;
break;
}
}
}
}
#endif
bit_slot_t free_bit_slot = BAD_BIT_SLOT;
/* Slow Path: This will iterate through all zones
* looking for a suitable one, this includes the
* zones we cached above */
if(zone == NULL) {
zone = iso_find_zone_fit(size);
}
if(zone != NULL) {
/* We only need to check if the zone is usable
 * if it's a custom zone. If we chose this zone
 * then it's guaranteed to already be usable */
if(zone->internally_managed == false) {
zone = is_zone_usable(zone, size);
if(zone == NULL) {
UNLOCK_ROOT();
#if ABORT_ON_NULL
LOG_AND_ABORT("isoalloc configured to abort on NULL");
#endif
return NULL;
}
}
free_bit_slot = zone->next_free_bit_slot;
} else {
/* Extra Slow Path: We need a new zone in order
* to satisfy this allocation request */
/* The size requested is above default zone sizes
* but we can still create it. iso_new_zone will
* align the requested size for us */
if(size > ZONE_8192) {
zone = _iso_new_zone(size, true);
} else {
/* For chunks smaller than 8192 bytes we
* bump the size up to the next power of 2 */
size = next_pow2(size);
zone = _iso_new_zone(size, true);
}
if(UNLIKELY(zone == NULL)) {
LOG_AND_ABORT("Failed to create a zone for allocation of %zu bytes", size);
}
/* This is a brand new zone, so the fast path
* should always work. Abort if it doesn't */
free_bit_slot = zone->next_free_bit_slot;
if(UNLIKELY(free_bit_slot == BAD_BIT_SLOT)) {
LOG_AND_ABORT("Allocated a new zone with no free bit slots");
}
}
if(UNLIKELY(free_bit_slot == BAD_BIT_SLOT)) {
UNLOCK_ROOT();
#if ABORT_ON_NULL
LOG_AND_ABORT("isoalloc configured to abort on NULL");
#endif
return NULL;
}
UNMASK_ZONE_PTRS(zone);
zone->next_free_bit_slot = BAD_BIT_SLOT;
void *p = _iso_alloc_bitslot_from_zone(free_bit_slot, zone);
populate_thread_caches(zone);
MASK_ZONE_PTRS(zone);
UNLOCK_ROOT();
return p;
} else {
/* It's safe to unlock the root at this point because
* the big zone allocation path uses a different lock */
UNLOCK_ROOT();
if(zone != NULL) {
LOG_AND_ABORT("Allocations of >= %d cannot use custom zones", SMALL_SZ_MAX);
}
return _iso_big_alloc(size);
}
}
INTERNAL_HIDDEN iso_alloc_big_zone *iso_find_big_zone(void *p) {
LOCK_BIG_ZONE();
/* It's possible we are trying to unmap a big allocation */
iso_alloc_big_zone *big_zone = _root->big_zone_head;
if(big_zone != NULL) {
big_zone = UNMASK_BIG_ZONE_NEXT(_root->big_zone_head);
}
while(big_zone != NULL) {
check_big_canary(big_zone);
/* Only a free of the exact address is valid */
if(p == big_zone->user_pages_start) {
UNLOCK_BIG_ZONE();
return big_zone;
}
if(UNLIKELY(p > big_zone->user_pages_start) && UNLIKELY(p < (big_zone->user_pages_start + big_zone->size))) {
LOG_AND_ABORT("Invalid free of big zone allocation at 0x%p in mapping 0x%p", p, big_zone->user_pages_start);
}
if(big_zone->next != NULL) {
big_zone = UNMASK_BIG_ZONE_NEXT(big_zone->next);
} else {
big_zone = NULL;
break;
}
}
UNLOCK_BIG_ZONE();
return NULL;
}
INTERNAL_HIDDEN iso_alloc_zone *iso_find_zone_bitmap_range(void *p) {
iso_alloc_zone *zone = NULL;
#if THREAD_SUPPORT && THREAD_CACHE
/* Hot Path: Check the thread cache for a zone this
* thread recently used for an alloc/free operation */
for(int64_t i = 0; i < thread_zone_cache_count; i++) {
UNMASK_ZONE_PTRS(thread_zone_cache[i].zone);
zone = thread_zone_cache[i].zone;
if(zone->bitmap_start <= p && (zone->bitmap_start + zone->bitmap_size) > p) {
MASK_ZONE_PTRS(zone);
return zone;
}
MASK_ZONE_PTRS(zone);
}
#endif
for(int32_t i = 0; i < _root->zones_used; i++) {
zone = &_root->zones[i];
UNMASK_ZONE_PTRS(zone);
if(zone->bitmap_start <= p && (zone->bitmap_start + zone->bitmap_size) > p) {
MASK_ZONE_PTRS(zone);
return zone;
}
MASK_ZONE_PTRS(zone);
}
return NULL;
}
INTERNAL_HIDDEN iso_alloc_zone *iso_find_zone_range(void *p) {
iso_alloc_zone *zone = NULL;
#if THREAD_SUPPORT && THREAD_CACHE
/* Hot Path: Check the thread cache for a zone this
* thread recently used for an alloc/free operation */
for(int64_t i = 0; i < thread_zone_cache_count; i++) {
UNMASK_ZONE_PTRS(thread_zone_cache[i].zone);
zone = thread_zone_cache[i].zone;
if(zone->user_pages_start <= p && (zone->user_pages_start + ZONE_USER_SIZE) > p) {
MASK_ZONE_PTRS(zone);
return zone;
}
MASK_ZONE_PTRS(zone);
}
#endif
for(int32_t i = 0; i < _root->zones_used; i++) {
zone = &_root->zones[i];
UNMASK_ZONE_PTRS(zone);
if(zone->user_pages_start <= p && (zone->user_pages_start + ZONE_USER_SIZE) > p) {
MASK_ZONE_PTRS(zone);
return zone;
}
MASK_ZONE_PTRS(zone);
}
return NULL;
}
/* Checking canaries under ASAN mode is not trivial, and
 * ASAN itself provides a strong guarantee that these chunks
 * haven't been modified, so these checks become no-ops */
#if ENABLE_ASAN || DISABLE_CANARY
INTERNAL_HIDDEN INLINE void check_big_canary(iso_alloc_big_zone *big) {
return;
}
INTERNAL_HIDDEN INLINE void write_canary(iso_alloc_zone *zone, void *p) {
return;
}
/* Verify the canary value in an allocation */
INTERNAL_HIDDEN INLINE void check_canary(iso_alloc_zone *zone, void *p) {
return;
}
INTERNAL_HIDDEN int64_t check_canary_no_abort(iso_alloc_zone *zone, void *p) {
return OK;
}
#else
/* Verifies both canaries in a big zone structure. This
* is a fast operation so we call it anytime we iterate
* through the linked list of big zones */
INTERNAL_HIDDEN INLINE void check_big_canary(iso_alloc_big_zone *big) {
uint64_t canary = ((uint64_t) big ^ bswap_64((uint64_t) big->user_pages_start) ^ _root->big_zone_canary_secret);
if(UNLIKELY(big->canary_a != canary)) {
LOG_AND_ABORT("Big zone 0x%p bottom canary has been corrupted! Value: 0x%x Expected: 0x%x", big, big->canary_a, canary);
}
if(UNLIKELY(big->canary_b != canary)) {
LOG_AND_ABORT("Big zone 0x%p top canary has been corrupted! Value: 0x%x Expected: 0x%x", big, big->canary_a, canary);
}
}
/* All free chunks get a canary written at both
* the start and end of their chunks. These canaries
* are verified when adjacent chunks are allocated,
* freed, or when the API requests validation. We
* sacrifice the high byte in entropy to prevent
* unbounded string reads from leaking it */
INTERNAL_HIDDEN INLINE void write_canary(iso_alloc_zone *zone, void *p) {
uint64_t canary = (zone->canary_secret ^ (uint64_t) p) & CANARY_VALIDATE_MASK;
memcpy(p, &canary, CANARY_SIZE);
p += (zone->chunk_size - sizeof(uint64_t));
memcpy(p, &canary, CANARY_SIZE);
}
/* Verify the canary value in an allocation */
INTERNAL_HIDDEN INLINE void check_canary(iso_alloc_zone *zone, void *p) {
uint64_t v = *((uint64_t *) p);
uint64_t canary = (zone->canary_secret ^ (uint64_t) p) & CANARY_VALIDATE_MASK;
if(UNLIKELY(v != canary)) {
LOG_AND_ABORT("Canary at beginning of chunk 0x%p in zone[%d][%d byte chunks] has been corrupted! Value: 0x%x Expected: 0x%x", p, zone->index, zone->chunk_size, v, canary);
}
v = *((uint64_t *) (p + zone->chunk_size - sizeof(uint64_t)));
if(UNLIKELY(v != canary)) {
LOG_AND_ABORT("Canary at end of chunk 0x%p in zone[%d][%d byte chunks] has been corrupted! Value: 0x%x Expected: 0x%x", p, zone->index, zone->chunk_size, v, canary);
}
}
INTERNAL_HIDDEN int64_t check_canary_no_abort(iso_alloc_zone *zone, void *p) {
uint64_t v = *((uint64_t *) p);
uint64_t canary = (zone->canary_secret ^ (uint64_t) p) & CANARY_VALIDATE_MASK;
if(UNLIKELY(v != canary)) {
LOG("Canary at beginning of chunk 0x%p in zone[%d] has been corrupted! Value: 0x%x Expected: 0x%x", p, zone->index, v, canary);
return ERR;
}
v = *((uint64_t *) (p + zone->chunk_size - sizeof(uint64_t)));
if(UNLIKELY(v != canary)) {
LOG("Canary at end of chunk 0x%p in zone[%d] has been corrupted! Value: 0x%x Expected: 0x%x", p, zone->index, v, canary);
return ERR;
}
return OK;
}
#endif
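/* While free, a chunk in a canary enabled zone looks like:
 *
 *   [ canary | chunk body | canary ]
 *
 * where each canary is a uint64_t written into the first and
 * last sizeof(uint64_t) bytes of the chunk, derived from the
 * zone's canary_secret and the chunk address as shown above */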
INTERNAL_HIDDEN void iso_free_big_zone(iso_alloc_big_zone *big_zone, bool permanent) {
LOCK_BIG_ZONE();
if(UNLIKELY(big_zone->free == true)) {
LOG_AND_ABORT("Double free of big zone 0x%p has been detected!", big_zone);
}
#if !ENABLE_ASAN && SANITIZE_CHUNKS
memset(big_zone->user_pages_start, POISON_BYTE, big_zone->size);
#endif
madvise(big_zone->user_pages_start, big_zone->size, MADV_DONTNEED);
/* If this isn't a permanent free then all we need
* to do is sanitize the mapping and mark it free.
* The pages backing the big zone can be reused. */
if(LIKELY(permanent == false)) {
POISON_BIG_ZONE(big_zone);
big_zone->free = true;
} else {
iso_alloc_big_zone *big = _root->big_zone_head;
if(big != NULL) {
big = UNMASK_BIG_ZONE_NEXT(_root->big_zone_head);
}
if(big == big_zone) {
/* The head is being free'd so promote its successor
 * (already masked, may be NULL) instead of dropping
 * the rest of the list */
_root->big_zone_head = big_zone->next;
} else {
/* We need to remove this entry from the list */
while(big != NULL) {
check_big_canary(big);
if(UNMASK_BIG_ZONE_NEXT(big->next) == big_zone) {
/* big_zone->next is already masked (or NULL) so
 * copy it through as-is to keep the list invariant */
big->next = big_zone->next;
break;
}
if(big->next != NULL) {
big = UNMASK_BIG_ZONE_NEXT(big->next);
} else {
big = NULL;
}
}
}
if(big == NULL) {
LOG_AND_ABORT("The big zone list has been corrupted, unable to find big zone 0x%p", big_zone);
}
mprotect_pages(big_zone->user_pages_start, big_zone->size, PROT_NONE);
memset(big_zone, POISON_BYTE, sizeof(iso_alloc_big_zone));
/* Big zone meta data is at a random offset from its base page */
mprotect_pages(((void *) ROUND_DOWN_PAGE((uintptr_t) big_zone)), _root->system_page_size, PROT_NONE);
}
UNLOCK_BIG_ZONE();
}
INTERNAL_HIDDEN void iso_free_chunk_from_zone(iso_alloc_zone *zone, void *p, bool permanent) {
/* Ensure the pointer is properly aligned */
if(UNLIKELY(IS_ALIGNED((uintptr_t) p) != 0)) {
LOG_AND_ABORT("Chunk at 0x%p of zone[%d] is not %d byte aligned", p, zone->index, ALIGNMENT);
}
uint64_t chunk_offset = (uint64_t) (p - zone->user_pages_start);
/* Ensure the pointer is a multiple of chunk size */
if(UNLIKELY((chunk_offset % zone->chunk_size) != 0)) {
LOG_AND_ABORT("Chunk at 0x%p is not a multiple of zone[%d] chunk size %d. Off by %lu bits", p, zone->index, zone->chunk_size, (chunk_offset % zone->chunk_size));
}
size_t chunk_number = (chunk_offset / zone->chunk_size);
bit_slot_t bit_slot = (chunk_number << BITS_PER_CHUNK_SHIFT);
bit_slot_t dwords_to_bit_slot = (bit_slot >> BITS_PER_QWORD_SHIFT);
if(UNLIKELY((zone->bitmap_start + dwords_to_bit_slot) >= (zone->bitmap_start + zone->bitmap_size))) {
LOG_AND_ABORT("Cannot calculate this chunks location in the bitmap 0x%p", p);
}
int64_t which_bit = WHICH_BIT(bit_slot);
bitmap_index_t *bm = (bitmap_index_t *) zone->bitmap_start;
/* Read out 64 bits from the bitmap. We will write
* them back before we return. This reduces the
* number of times we have to hit the bitmap page
* which could result in a page fault */
bitmap_index_t b = bm[dwords_to_bit_slot];
/* Double free detection */
if(UNLIKELY((GET_BIT(b, which_bit)) == 0)) {
LOG_AND_ABORT("Double free of chunk 0x%p detected from zone[%d] dwords_to_bit_slot=%lu bit_slot=%" PRIu64, p, zone->index, dwords_to_bit_slot, bit_slot);
}
/* Set the next bit so we know this chunk was used */
SET_BIT(b, (which_bit + 1));
/* Unset the bit and write the value into the bitmap
* if this is not a permanent free. A permanent free
* means this chunk will be marked as if it is a canary */
if(LIKELY(permanent == false)) {
UNSET_BIT(b, which_bit);
insert_free_bit_slot(zone, bit_slot);
zone->is_full = false;
#if !ENABLE_ASAN && SANITIZE_CHUNKS
iso_clear_user_chunk(p, zone->chunk_size);
#endif
} else {
iso_clear_user_chunk(p, zone->chunk_size);
}
bm[dwords_to_bit_slot] = b;
/* Now that we have free'd this chunk let's validate the
 * chunks before and after it. If they were previously
 * used and currently free they should have canaries
 * we can verify */
#if !ENABLE_ASAN && !DISABLE_CANARY
write_canary(zone, p);
if((chunk_number + 1) != GET_CHUNK_COUNT(zone)) {
bit_slot_t bit_slot_over = ((chunk_number + 1) << BITS_PER_CHUNK_SHIFT);
dwords_to_bit_slot = (bit_slot_over >> BITS_PER_QWORD_SHIFT);
which_bit = WHICH_BIT(bit_slot_over);
if((GET_BIT(bm[dwords_to_bit_slot], (which_bit + 1))) == 1) {
void *p_over = POINTER_FROM_BITSLOT(zone, bit_slot_over);
check_canary(zone, p_over);
}
}
if(chunk_number != 0) {
bit_slot_t bit_slot_under = ((chunk_number - 1) << BITS_PER_CHUNK_SHIFT);
dwords_to_bit_slot = (bit_slot_under >> BITS_PER_QWORD_SHIFT);
which_bit = WHICH_BIT(bit_slot_under);
if((GET_BIT(bm[dwords_to_bit_slot], (which_bit + 1))) == 1) {
void *p_under = POINTER_FROM_BITSLOT(zone, bit_slot_under);
check_canary(zone, p_under);
}
}
#endif
POISON_ZONE_CHUNK(zone, p);
populate_thread_caches(zone);
}
INTERNAL_HIDDEN void _iso_free(void *p, bool permanent) {
if(p == NULL) {
return;
}
#if NO_ZERO_ALLOCATIONS
if(p == _zero_alloc_page) {
return;
}
#endif
#if ALLOC_SANITY
int32_t r = _iso_alloc_free_sane_sample(p);
if(r == OK) {
return;
}
#endif
LOCK_ROOT();
#if FUZZ_MODE
_verify_all_zones();
#endif
iso_alloc_zone *zone = iso_find_zone_range(p);
if(LIKELY(zone != NULL)) {
UNMASK_ZONE_PTRS(zone);
iso_free_chunk_from_zone(zone, p, permanent);
MASK_ZONE_PTRS(zone);
#if UAF_PTR_PAGE
if(UNLIKELY((rand_uint64() % UAF_PTR_PAGE_ODDS) == 1)) {
_iso_alloc_ptr_search(p, true);
}
#endif
UNLOCK_ROOT();
} else {
iso_alloc_big_zone *big_zone = iso_find_big_zone(p);
UNLOCK_ROOT();
if(UNLIKELY(big_zone == NULL)) {
LOG_AND_ABORT("Could not find any zone for allocation at 0x%p", p);
}
iso_free_big_zone(big_zone, permanent);
}
}
/* Disable all use of iso_alloc by protecting the _root */
INTERNAL_HIDDEN void _iso_alloc_protect_root(void) {
LOCK_ROOT();
mprotect_pages(_root, sizeof(iso_alloc_root), PROT_NONE);
}
/* Unprotect all use of iso_alloc by allowing R/W of the _root */
INTERNAL_HIDDEN void _iso_alloc_unprotect_root(void) {
mprotect_pages(_root, sizeof(iso_alloc_root), PROT_READ | PROT_WRITE);
UNLOCK_ROOT();
}
INTERNAL_HIDDEN size_t _iso_chunk_size(void *p) {
if(p == NULL) {
return 0;
}
#if ALLOC_SANITY
LOCK_SANITY_CACHE();
_sane_allocation_t *sane_alloc = _get_sane_alloc(p);
if(sane_alloc != NULL) {
size_t orig_size = sane_alloc->orig_size;
UNLOCK_SANITY_CACHE();
return orig_size;
}
UNLOCK_SANITY_CACHE();
#endif
LOCK_ROOT();
/* We cannot return NULL here, we abort instead */
iso_alloc_zone *zone = iso_find_zone_range(p);
if(UNLIKELY(zone == NULL)) {
UNLOCK_ROOT();
iso_alloc_big_zone *big_zone = iso_find_big_zone(p);
if(big_zone == NULL) {
LOG_AND_ABORT("Could not find any zone for allocation at 0x%p", p);
}
return big_zone->size;
}
UNLOCK_ROOT();
return zone->chunk_size;
}
INTERNAL_HIDDEN uint64_t _iso_alloc_detect_leaks_in_zone(iso_alloc_zone *zone) {
LOCK_ROOT();
uint64_t leaks = _iso_alloc_zone_leak_detector(zone, false);
UNLOCK_ROOT();
return leaks;
}
INTERNAL_HIDDEN uint64_t _iso_alloc_mem_usage() {
LOCK_ROOT();
uint64_t mem_usage = __iso_alloc_mem_usage();
mem_usage += _iso_alloc_big_zone_mem_usage();
UNLOCK_ROOT();
return mem_usage;
}
INTERNAL_HIDDEN uint64_t _iso_alloc_big_zone_mem_usage() {
LOCK_BIG_ZONE();
uint64_t mem_usage = __iso_alloc_big_zone_mem_usage();
UNLOCK_BIG_ZONE();
return mem_usage;
}
INTERNAL_HIDDEN uint64_t _iso_alloc_zone_mem_usage(iso_alloc_zone *zone) {
LOCK_ROOT();
uint64_t zone_mem_usage = __iso_alloc_zone_mem_usage(zone);
UNLOCK_ROOT();
return zone_mem_usage;
}
#if UNIT_TESTING
/* Some tests require getting access to IsoAlloc internals
* that aren't supported by the API. We never want these
* in release builds of the library */
EXTERNAL_API iso_alloc_root *_get_root(void) {
return _root;
}
#endif
<|start_filename|>src/iso_alloc_search.c<|end_filename|>
/* iso_alloc_search.c - A secure memory allocator
* Copyright 2021 - <EMAIL> */
#include "iso_alloc_internal.h"
/* Search all zones for either the first instance of a pointer
* value and return it or overwrite the first potentially
* dangling pointer with the address of an unmapped page */
INTERNAL_HIDDEN void *_iso_alloc_ptr_search(void *n, bool poison) {
uint8_t *h = NULL;
for(int32_t i = 0; i < _root->zones_used; i++) {
iso_alloc_zone *zone = &_root->zones[i];
UNMASK_ZONE_PTRS(zone);
h = zone->user_pages_start;
while(h <= (uint8_t *) (zone->user_pages_start + ZONE_USER_SIZE - sizeof(uint64_t))) {
if(LIKELY((uint64_t) * (uint64_t *) h != (uint64_t) n)) {
h++;
} else {
if(poison == false) {
MASK_ZONE_PTRS(zone);
return h;
} else {
#if UAF_PTR_PAGE
*(uint64_t *) h = UAF_PTR_PAGE_ADDR;
MASK_ZONE_PTRS(zone);
return h;
#endif
}
}
}
MASK_ZONE_PTRS(zone);
}
return NULL;
}
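/* The two modes of _iso_alloc_ptr_search(), for reference:
 *
 *   void *hit = _iso_alloc_ptr_search(p, false);  // locate a stored copy of p
 *   _iso_alloc_ptr_search(p, true);               // overwrite the first copy
 *                                                 // with UAF_PTR_PAGE_ADDR
 *                                                 // (when UAF_PTR_PAGE is set)
 */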
#if EXPERIMENTAL
/* These functions are all experimental and subject to change */
/* Search the stack for pointers into IsoAlloc zones. If
* stack_start is NULL then this function starts searching
* from the environment variables which should be mapped
* just below the stack */
INTERNAL_HIDDEN void _iso_alloc_search_stack(uint8_t *stack_start) {
if(stack_start == NULL) {
stack_start = (uint8_t *) ENVIRON;
if(stack_start == NULL) {
return;
}
}
/* The end of our stack is the address of this local */
uint8_t *stack_end;
stack_end = (uint8_t *) &stack_end;
uint64_t tps = UINT32_MAX;
uint8_t *current = stack_start;
uint64_t max_ptr = 0x800000000000;
while(current > stack_end) {
/* Iterating through zones is expensive so this quickly
* decides on values that are unlikely to be pointers
* into zone user pages */
if(*(int64_t *) current <= tps || *(int64_t *) current >= max_ptr || (*(int64_t *) current & 0xffffff) == 0) {
//LOG("Ignoring pointer start=%p end=%p stack_ptr=%p value=%lx", stack_start, stack_end, current, *(int64_t *)current);
current--;
continue;
}
uint64_t *p = (uint64_t *) *(int64_t *) current;
iso_alloc_zone *zone = iso_find_zone_range(p);
current--;
if(zone != NULL) {
UNMASK_ZONE_PTRS(zone);
/* Ensure the pointer is properly aligned */
if(UNLIKELY(IS_ALIGNED((uintptr_t) p) != 0)) {
LOG_AND_ABORT("Chunk at 0x%p of zone[%d] is not %d byte aligned", p, zone->index, ALIGNMENT);
}
uint64_t chunk_offset = (uint64_t) (p - (uint64_t *) zone->user_pages_start);
LOG("zone[%d] user_pages_start=%p value=%p %lu %d", zone->index, zone->user_pages_start, p, chunk_offset, zone->chunk_size);
/* Ensure the pointer is a multiple of chunk size */
if(UNLIKELY((chunk_offset % zone->chunk_size) != 0)) {
LOG("Chunk at %p is not a multiple of zone[%d] chunk size %d. Off by %" PRIu64 " bits", p, zone->index, zone->chunk_size, (chunk_offset % zone->chunk_size));
MASK_ZONE_PTRS(zone);
continue;
}
size_t chunk_number = (chunk_offset / zone->chunk_size);
bit_slot_t bit_slot = (chunk_number * BITS_PER_CHUNK);
bit_slot_t dwords_to_bit_slot = (bit_slot / BITS_PER_QWORD);
if(UNLIKELY((zone->bitmap_start + dwords_to_bit_slot) >= (zone->bitmap_start + zone->bitmap_size))) {
LOG("Cannot calculate this chunks location in the bitmap %p", p);
MASK_ZONE_PTRS(zone);
continue;
}
int64_t which_bit = (bit_slot % BITS_PER_QWORD);
bitmap_index_t *bm = (bitmap_index_t *) zone->bitmap_start;
bitmap_index_t b = bm[dwords_to_bit_slot];
if(UNLIKELY((GET_BIT(b, which_bit)) == 0)) {
LOG("Chunk at %p is free", p);
} else {
LOG("Chunk at %p is in-use", p);
}
MASK_ZONE_PTRS(zone);
}
zone = iso_find_zone_bitmap_range(p);
if(zone != NULL) {
LOG_AND_ABORT("Pointer to bitmap for zone[%d] found in stack @ %p", zone->index, p);
}
}
}
#endif
<|start_filename|>tests/alloc_fuzz.c<|end_filename|>
/* iso_alloc alloc_fuzz.c
* Copyright 2021 - <EMAIL> */
/* This test is not meant to be run as a part of the IsoAlloc
* test suite. It should be run stand alone during development
* work to catch any bugs you introduce */
#include "iso_alloc.h"
#include "iso_alloc_internal.h"
#include <time.h>
uint32_t allocation_sizes[] = {ZONE_16, ZONE_32, ZONE_64, ZONE_128,
ZONE_256, ZONE_512, ZONE_1024,
ZONE_2048, ZONE_4096, ZONE_8192,
SMALL_SZ_MAX / 4, SMALL_SZ_MAX / 2, SMALL_SZ_MAX};
uint32_t array_sizes[] = {16, 32, 64, 128, 256, 512, 1024, 2048};
uint32_t alloc_count;
/* Parameters for controlling probability of leaking a chunk.
* This will add up very quickly with the speed of allocations.
* This should exercise all code including new internally
* managed zone allocation. Eventually we get OOM and SIGKILL */
#define LEAK_K 1000
#define LEAK_V 8
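/* With these values a surviving chunk is freed only when
 * (rand() % LEAK_K) > LEAK_V, i.e. 991 times out of 1000, so each
 * chunk is intentionally leaked with probability 9/1000 (~0.9%). */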
iso_alloc_zone_handle *custom_zone;
#define NEW_ZONE_K 1000
#define NEW_ZONE_V 1
#define DESTROY_ZONE_K 1000
#define DESTROY_ZONE_V 8
#define MAYBE_VALIDATE_ZONES() \
if((rand() % 10) == 1) { \
iso_verify_zones(); \
}
int reallocate(size_t array_size, size_t allocation_size) {
void *p[array_size];
memset(p, 0x0, sizeof(p));
for(int i = 0; i < array_size; i++) {
if(allocation_size == 0) {
allocation_size = allocation_sizes[rand() % (sizeof(allocation_sizes) / sizeof(uint32_t))] + (rand() % 32);
}
void *d = iso_alloc(allocation_size / 2);
memset(d, 0x0, allocation_size / 2);
p[i] = iso_realloc(d, allocation_size);
if(p[i] == NULL) {
LOG_AND_ABORT("Failed to allocate %ld bytes after %d total allocations", allocation_size, alloc_count);
}
alloc_count++;
/* Free every other allocation */
if(i % 2) {
iso_free(p[i]);
p[i] = NULL;
alloc_count--;
}
}
MAYBE_VALIDATE_ZONES();
/* Free the remaining allocations */
for(int i = 0; i < array_size; i++) {
if(p[i] != NULL && ((rand() % LEAK_K) > LEAK_V)) {
iso_free(p[i]);
alloc_count--;
}
}
return OK;
}
int callocate(size_t array_size, size_t allocation_size) {
void *p[array_size];
memset(p, 0x0, sizeof(p));
for(int i = 0; i < array_size; i++) {
if(allocation_size == 0) {
allocation_size = allocation_sizes[rand() % (sizeof(allocation_sizes) / sizeof(uint32_t))] + (rand() % 32);
}
p[i] = iso_calloc(1, allocation_size);
if(p[i] == NULL) {
LOG_AND_ABORT("Failed to allocate %ld bytes after %d total allocations", allocation_size, alloc_count);
}
alloc_count++;
/* Free every other allocation */
if(i % 2) {
iso_free(p[i]);
p[i] = NULL;
alloc_count--;
}
}
MAYBE_VALIDATE_ZONES();
/* Free the remaining allocations */
for(int i = 0; i < array_size; i++) {
if(p[i] != NULL && ((rand() % LEAK_K) > LEAK_V)) {
iso_free(p[i]);
alloc_count--;
}
}
return OK;
}
int allocate(size_t array_size, size_t allocation_size) {
void *p[array_size];
memset(p, 0x0, sizeof(p));
if(rand() % 100 == 1) {
if(custom_zone != NULL) {
iso_alloc_destroy_zone(custom_zone);
}
custom_zone = iso_alloc_new_zone(allocation_size);
}
for(int i = 0; i < array_size; i++) {
if(allocation_size == 0) {
allocation_size = allocation_sizes[rand() % (sizeof(allocation_sizes) / sizeof(uint32_t))] + (rand() % 32);
}
if(rand() % 100 == 1 && custom_zone != NULL && allocation_size < SMALL_SZ_MAX) {
p[i] = iso_alloc_from_zone(custom_zone, allocation_size);
} else {
p[i] = iso_alloc(allocation_size);
}
if(p[i] == NULL) {
LOG_AND_ABORT("Failed to allocate %ld bytes after %d total allocations", allocation_size, alloc_count);
}
alloc_count++;
/* Free every other allocation */
if(i % 2) {
iso_free(p[i]);
p[i] = NULL;
alloc_count--;
}
}
MAYBE_VALIDATE_ZONES();
/* Free the remaining allocations */
for(int i = 0; i < array_size; i++) {
if(p[i] != NULL && ((rand() % LEAK_K) > LEAK_V)) {
iso_free(p[i]);
alloc_count--;
}
}
if(custom_zone != NULL && (rand() % 10) == 1) {
iso_alloc_destroy_zone(custom_zone);
custom_zone = NULL;
}
return OK;
}
int main(int argc, char *argv[]) {
alloc_count = 0;
custom_zone = NULL;
while(1) {
for(int i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
for(int z = 0; z < sizeof(allocation_sizes) / sizeof(uint32_t); z++) {
allocate(array_sizes[i], allocation_sizes[z]);
}
}
for(int i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
allocate(array_sizes[i], 0);
}
for(int i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
for(int z = 0; z < sizeof(allocation_sizes) / sizeof(uint32_t); z++) {
callocate(array_sizes[i], allocation_sizes[z]);
}
}
for(int i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
callocate(array_sizes[i], 0);
}
for(int i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
for(int z = 0; z < sizeof(allocation_sizes) / sizeof(uint32_t); z++) {
reallocate(array_sizes[i], allocation_sizes[z]);
}
}
for(int i = 0; i < sizeof(array_sizes) / sizeof(uint32_t); i++) {
reallocate(array_sizes[i], 0);
}
LOG("Total leaked allocations: %d", alloc_count);
}
return 0;
}
<|start_filename|>tests/leaks_test.c<|end_filename|>
/* iso_alloc leaks.c
* Copyright 2021 - <EMAIL> */
#include "iso_alloc.h"
#include "iso_alloc_internal.h"
int main(int argc, char *argv[]) {
void *p[16];
int32_t leak = 0;
for(int32_t i = 0; i < 16; i++) {
p[i] = iso_alloc(i * i);
/* Free a single chunk */
if(i == 1) {
iso_free(p[i]);
} else {
leak++;
}
}
for(int32_t i = 0; i < 16; i++) {
LOG("p[%d] (%p) = %p", i, &p[i], p[i]);
}
iso_verify_zones();
int32_t r = iso_alloc_detect_leaks();
LOG("Total leaks detected: %d %p of %d", r, p, leak);
return r;
}
| struct/isoalloc |
<|start_filename|>node_modules/locale/examples/express.js<|end_filename|>
var http = require("http")
, express = require("express")
, locale = require("../lib")
, supported = ["en", "en_US", "ja"]
, app = express.createServer(locale(supported))
app.get("/", function(req, res) {
res.header("Content-Type", "text/plain")
res.send(
"You asked for: " + req.headers["accept-language"] + "\n" +
"We support: " + supported + "\n" +
"Our default is: " + locale.Locale["default"] + "\n" +
"The best match is: " + req.locale + "\n"
)
})
app.listen(8000)
<|start_filename|>node_modules/email-templates/node_modules/juice/node_modules/superagent/test.js<|end_filename|>
var request = require('./');
var http = require('http');
var parse = require('url').parse;
var url = 'http://arstechnica.com/information-technology/2013/05/fios-customer-discovers-the-limits-of-unlimited-data-77-tb-in-month/';
var n = 10;
// var options = parse(url);
// options.method = 'HEAD';
// options.agent = false;
// while (n--) {
// var req = http.request(options);
// req.on('response', function(res){
// console.log(res.statusCode);
// });
// req.on('error', function(err){
// console.log(err.message);
// });
// req.end();
// }
while (n--) {
request
.head(url)
.end(function(err, res){
if (err) return console.error(err.message);
console.log(res.status);
})
}
| etrusco74/darkroomlocator-server |
<|start_filename|>logger.js<|end_filename|>
const { version } = require('./package.json');
const chalk = require('chalk');
const moment = require('moment');
const readline = require('readline');
readline.emitKeypressEvents(process.stdin);
let api = {};
process.stdin.on('keypress', (char, key) => {
if (key && key.ctrl && key.name == 'c') {
console.log(chalk.red('Exiting program...'));
process.exit()
}
});
api.intro = function(instances, proxies) {
console.log('');
console.log(chalk.bgBlack.white('Adidas Bruteforcer '), chalk.bold(' v' + version));
console.log(chalk.dim(`Loading ${instances} instances with ${proxies} proxies...`));
console.log('');
};
api.info = function(instance, message) {
console.log(chalk.bgBlackBright.white(`Instance ${instance} `), chalk.dim(message));
};
api.error = function(instance, error) {
console.log(chalk.bgRed.white(`Instance ${instance}`), error);
};
api.success = function(instance) {
console.log(chalk.green(`Cart page on ${instance}!`), chalk.dim('—'), chalk.dim(moment().format('hh:mm:ss')));
console.log('');
};
module.exports = api;
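// Minimal usage sketch (hypothetical values):
//   const logger = require('./logger');
//   logger.intro(4, 100);                   // banner for 4 instances, 100 proxies
//   logger.info(1, 'Navigating to page');   // per-instance status line
//   logger.error(1, 'Request timed out');   // red error banner
//   logger.success(1);                      // timestamped cart notification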
<|start_filename|>browser-bot.js<|end_filename|>
const puppeteer = require('puppeteer-extra')
const fetch = require('node-fetch')
const notifier = require('node-notifier');
const path = require('path');
const GOOGLE_COOKIES = require('./cookies.json');
const logger = require('./logger');
const regions = require('./regions')
const $ = require('cheerio');
const querystring = require('querystring');
const prettier = require('prettier');
const atob = require('atob');
const btoa = require('btoa');
const fs = require('fs');
const pluginStealth = require("puppeteer-extra-plugin-stealth");
const RecaptchaPlugin = require('puppeteer-extra-plugin-recaptcha_v2')
puppeteer.use(pluginStealth());
var config;
if (fs.existsSync(".git")) {
config = require("./dev.config.json");
} else {
config = require("./config.json");
}
if (config.twocaptcha.enabled && config.twocaptcha.apiKey != "")
puppeteer.use(
RecaptchaPlugin({
provider: { id: '2captcha', token: config.twocaptcha.apiKey },
visualFeedback: true // colorize reCAPTCHAs (violet = detected, green = solved)
})
);
/*
* Global Vars
*/
// Holds url that is first navigated to
var baseUrl;
// Holds product ID when it is detected
let PID = null;
// Contains list of all sizes reported by the server
let availability = [];
// Get sizes to cart
let sizesToCart = [];
if (config.autoCart.sizes != "any" && config.autoCart.sizes != "")
sizesToCart = config.autoCart.sizes.split(',');
// Store region details
const region = regions.getRegion(config.region)
/*
* Urls
* TODO: change these to support other regions
*/
const checkoutUrl = `https://www.adidas.${region.domain}/on/demandware.store/Sites-adidas-${region.code}-Site/${region.language}_${region.code}/COShipping-Show`;
const paymentUrl = `https://www.adidas.${region.domain}/on/demandware.store/Sites-adidas-${region.code}-Site/${region.language}_${region.code}/COSummary2-Start`;
const cartUrl = `https://www.adidas.${region.domain}/api/cart_items?sitePath=${region.code}`;
const shippingSubmitUrl = `https://www.adidas.${region.domain}/on/demandware.store/Sites-adidas-${region.code}-Site/${region.language}_${region.code}/COShipping-Submit`;
const paymentSubmitUrl = `https://www.adidas.${region.domain}/on/demandware.store/Sites-adidas-${region.code}-Site/${region.language}_${region.code}/COPayment-HandlePaymentForm`;
module.exports = class Bot {
constructor(options) {
this.browser = null;
this.page = null;
this.captcha = false;
this.captchaSolution = "";
this.instance = options.instance;
this.proxy = options.proxy;
}
async start() {
let args;
if (this.proxy != null) {
args = [
'--no-sandbox',
`--window-size=${config.windowWidth},${config.windowHeight}`,
`--proxy-server=${this.proxy}`
];
} else {
args = [
'--no-sandbox',
`--window-size=${config.windowWidth},${config.windowHeight}`,
];
}
// Launch the browser
this.browser = await puppeteer.launch({
args,
headless: config.headless,
ignoreHTTPSErrors: true,
userDataDir: path.resolve('saves', 'chrome_' + this.instance)
});
// Add google cookies to browser if provided
if (Object.keys(GOOGLE_COOKIES).length != 0) {
const cookiePage = await this.browser.newPage();
cookiePage.setDefaultNavigationTimeout(60000);
await cookiePage.goto('http://www.google.com/404');
for (let cookie of GOOGLE_COOKIES) {
await cookiePage.setCookie({
name: cookie.name,
value: cookie.value
});
}
await cookiePage.close();
}
// Create main page
this.page = await this.browser.newPage();
// Close first empty page
(await this.browser.pages())[0].close();
// Max the viewport
await this.page.setViewport({
width: 0,
height: 0
});
//Set timeout
await this.page.setDefaultNavigationTimeout(0);
// Allow interception
await this.page.setRequestInterception(true)
// Set up listeners
await this.setListeners();
// Navigate to the page and solve captcha if needed
while (!(await this.goTo(config.url, true))) {
// Wait for the set timeout
await new Promise(resolve => setTimeout(resolve, config.retryDelay));
}
// Splash mode
if (config.splashMode) {
// Wait for splash page to be found
const cookie = await this.waitForATC();
// Switch to headed browser if needed
if (!config.headlessAfterSplash && config.headless)
await this.launchHeadedBrowser(await this.page.cookies());
logger.info(this.instance, `HMAC Name = ${cookie[0]}, HMAC Value = ${cookie[1]}`);
logger.info(this.instance, `Looking for captchas...`);
// Look for captchas
const cap = await this.findCaptchas();
// Notify user
if (cap != false) {
logger.info(this.instance, `Solving captcha...`);
// Solve captcha and set as solution
this.captchaSolution =
await this.solveCaptchas(cap);
} else {
logger.info(this.instance, `No captcha found.`);
}
// Log success
logger.success(this.instance);
// Notify user
if (config.alerts) {
notifier.notify({
title: 'Adidas Bruteforcer',
message: `Cart page on instance ${this.instance}!`,
sound: 'Hero',
timeout: 60000
}, async (err, res) => {
if (res == 'activate') {
await this.page.bringToFront();
}
});
}
}
// Auto cart the shoe
if (config.autoCart.enabled) {
if (PID == null && config.autoCart.PID != "") PID = config.autoCart.PID;
else if (PID == null) {
logger.info(this.instance, `Waiting for PID to be discovered...`);
// Wait for productID to be discovered
await new Promise(async resolve => {
var interval = setInterval(function () {
if (PID != null) {
clearInterval(interval);
resolve();
}
}, config.detectionInterval);
});
}
// Cart the shoe
while (!(await this.cartProduct())) {
await new Promise(resolve => setTimeout(resolve, config.retryDelay));
}
logger.info(this.instance, `Carted shoe!`);
await this.page.goto(checkoutUrl, { waitUntil: 'domcontentloaded' });
}
// Submit checkout information
if (config.autoCheckout.enabled) {
while (true) {
if (await this.submitShipping()) break;
await new Promise(resolve => setTimeout(resolve, config.retryDelay));
}
await this.page.goto(paymentUrl, { waitUntil: 'domcontentloaded' });
await this.submitPayment();
};
}
async stop() {
await this.browser.close();
}
// Navigate to a page with error catches
// Also solves a captcha if one is found before resolving
async goTo(url, lookForCaptcha) {
try {
await this.page.goto(url, { waitUntil: 'domcontentloaded' });
// Set base url
if (baseUrl == null) baseUrl = await this.page.url();
// Click on page, triggers bmak
try {
await this.page.click('body')
} catch (err) {
logger.error(this.instance, `Error clicking on body element!`)
}
// Send bmak so that we don't get banned on ATC
const bmak = await this.page.evaluate(() => {
if (typeof bmak == "undefined" || typeof bmak.startTracking != "function") return false;
bmak.startTracking();
return true;
});
// If calling the bmak function fails, manually trigger the function
if (!bmak) {
// Select the size dropdown
try {
await (await this.page.$x("//*[text() = 'Select size']"))[0].click();
} catch (err) {
// Not found
}
}
// If we are looking for captchas
if (lookForCaptcha) {
try {
// Wait for captcha to load
await this.page.waitForFunction(
"document.querySelector(`iframe[src^='https://www.google.com/recaptcha/api2/anchor'][name^='a-']`)"
+ "&& document.querySelector(`iframe[src^='https://www.google.com/recaptcha/api2/anchor'][name^='a-']`).clientHeight != 0",
{ visible: true, timeout: 5000 });
// Solve captchas
if (!config.twocaptcha.enabled || config.twocaptcha.apiKey == "") {
logger.error(this.instance, `Captcha detected, cannot solve because either 2captcha is not enabled or you did not supply an API key!`);
} else {
// Find captcha
const cap = await this.findCaptchas();
// Notify user
logger.info(this.instance, `Solving captcha...`);
// Solve captcha and set as solution
this.captchaSolution =
await this.solveCaptchas(cap);
return true;
}
} catch (err) {
// Captcha not found
if (err.name == "TimeoutError")
return true;
logger.error(this.instance, `Unknown error occured: ${err}`);
return false;
}
}
return true;
} catch (err) {
logger.error(this.instance, `Error loading page: ${err}`);
return false;
}
}
// Contains event handlers for various pages and conditions
async setListeners() {
var matchRule = (str, rule) => {
return new RegExp("^" + rule.split("*").join(".*") + "$").test(str);
}
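// e.g. matchRule('https://x.com/api/products/ABC123/availability?f=1',
//                '*/api/products/*/availability*') === true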
// Handlers
this.page.on('response', async response => {
// Catch availability response
if (matchRule(response.url(), '*/api/products/*/availability*')) {
try {
let json = await response.json();
PID = json.id;
availability = json.variation_list;
} catch (err) {
logger.error(this.instance, `Error parsing availability JSON: ${err}`);
}
}
// Catch waiting room config response
if (matchRule(response.url(), '*waitingRoomConfig.json')) {
try {
let json = await response.json();
} catch (err) {
logger.error(this.instance, `Error parsing waiting room config JSON: ${err}`);
}
}
});
// Needed to prevent page from idling
this.page.on('request', request => {
request.continue();
});
}
async launchHeadedBrowser(cookies) {
await this.browser.close();
this.browser = await puppeteer.launch({
args: [
'--no-sandbox',
'--disable-setuid-sandbox',
'--disable-dev-shm-usage',
'--disable-accelerated-2d-canvas',
'--disable-gpu',
`--window-size=${config.windowWidth},${config.windowHeight}`
],
headless: false,
});
if (Object.keys(GOOGLE_COOKIES).length != 0) {
const cookiePage2 = await this.browser.newPage();
cookiePage2.setDefaultNavigationTimeout(60000);
await cookiePage2.goto('http://www.google.com/404');
for (let cookie of GOOGLE_COOKIES) {
await cookiePage2.setCookie({
name: cookie.name,
value: cookie.value
});
}
await cookiePage2.close();
}
this.page = (await this.browser.pages())[0];
this.page.setViewport({ width: 0, height: 0 });
// Set cookies
await this.page.setCookie(...cookies);
// Detection evasion is handled by puppeteer-extra-plugin-stealth,
// so no extra page preparation step is needed here
// Set up listeners
await this.setListeners();
await this.page.goto(baseUrl);
}
async waitForATC() {
return new Promise((resolve, reject) => {
var interval = setInterval(async function (page) {
let cookies = await page.cookies();
for (let cookie of cookies) {
if (cookie.value.includes(config.splashCookieKeyword)) {
clearInterval(interval);
resolve([cookie.name, cookie.value]);
}
}
}, config.detectionInterval, this.page);
});
}
// Finds a captcha on the page and returns the object
async findCaptchas() {
return new Promise(async (resolve, reject) => {
if (!config.twocaptcha.enabled || config.twocaptcha.apiKey == "") resolve(false);
try {
let { captchas, error } = await this.page.findRecaptchas();
if (error != null) {
logger.error(this.instance, `Error finding captcha: ${error}`)
resolve(false);
} else if (captchas.length != 0) {
logger.info(this.instance, `Found captcha!`)
resolve(captchas);
} else {
resolve(false);
}
} catch (err) {
logger.error(this.instance, `Error finding captcha: ${err}`)
resolve(false);
}
})
}
// Resolves when the captcha is solved and entered
async solveCaptchas(captchas) {
// Return if there was an error
if (captchas == false) return false;
try {
let { solutions, error: error1 } = await this.page.getRecaptchaSolutions(captchas)
let { solved, error: error2 } = await this.page.enterRecaptchaSolutions(solutions)
if (error1) {
logger.error(this.instance, `Error solving captcha: ${error1}`);
} else if (error2) {
logger.error(this.instance, `Error solving captcha: ${error2}`);
} else {
return solutions[0].text;
}
return false;
} catch (err) {
logger.error(this.instance, `Error solving captcha: ${err}`);
return false;
}
}
async submitShipping() {
// Serialize the checkout form
let checkoutForm = await this.page.evaluate(async () => {
try {
return await jQuery('#shippingForm').serialize();
} catch (e) {
return null;
}
});
// Catch null shippingForm
if (checkoutForm == null) {
logger.error(this.instance, "Failed to serialize shippingForm!")
return false;
}
// Convert it to JSON
var json = querystring.parse(checkoutForm);
// Grab user data from config file
var userData = config.autoCheckout.data;
Object.keys(json).forEach(function (k) {
for (var name in userData) {
if (k.includes(name))
json[k] = userData[name];
}
});
// Add the last fields that are created by a script
json['dwfrm_shipping_updateshippingmethods'] = 'updateshippingmethods';
json['dwfrm_shipping_submitshiptoaddress'] = 'Review and Pay';
return await this.page.evaluate(async (body, url) => {
try {
await fetch(url, {
"credentials": "omit",
"headers": {
"accept": "application/json, text/plain, */*",
"content-type": "application/x-www-form-urlencoded;charset=UTF-8"
},
"referrer": "https://www.adidas.com/on/demandware.store/Sites-adidas-US-Site/en_US/COShipping-Show",
"referrerPolicy": "no-referrer-when-downgrade",
"body": body,
"method": "POST",
"mode": "cors"
});
return true;
} catch (err) {
console.log(err); // logger and `this` are not available inside the page context
return false;
}
}, querystring.stringify(json), shippingSubmitUrl);
}
async submitPayment() {
// Pulled from adidas.com
function getCardType(cardNumber, mode) {
var result = 'other';
var returnMode = mode || 0; // return mode 0 - as string (default), 1 - as digit
if (typeof cardNumber == 'undefined' || !cardNumber.length) {
return result;
}
var cardNumber = cardNumber.replace(/[\s-]/g, '');
// first check for MasterCard (number starts with ranges 51-55 or 2221-2720)
if (/^(?:5[1-5]|222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)/.test(cardNumber)) {
result = returnMode ? '002' : 'mc';
}
// then check for Visa
else if (/^4/.test(cardNumber)) {
result = returnMode ? '001' : 'visa';
}
// then check for AmEx
else if (/^3[47]/.test(cardNumber)) {
result = returnMode ? '003' : 'amex';
}
// then check for Discover
else if (/^(6011|622(12[6-9]|1[3-9][0-9]|[2-8][0-9]{2}|9[0-1][0-9]|92[0-5]|64[4-9])|65)/.test(cardNumber)) {
result = returnMode ? '004' : 'discover';
}
// then check for Diners Club International
else if (/^3(?:0|6|8)/.test(cardNumber)) {
result = returnMode ? '005' : 'diners';
}
// then check for ELO
else if (/^((((636368)|(438935)|(504175)|(451416)|(636297)|(506699))\d{0,10})|((5067)|(4576)|(4011))\d{0,12})$/.test(cardNumber)) {
result = returnMode ? '006' : 'elo';
}
// then check for Hipercard
else if (/^(606282\d{10}(\d{3})?)|(3841\d{15})$/.test(cardNumber)) {
result = returnMode ? '007' : 'hipercard';
}
// then check for electron
else if (/^(4026|417500|4405|4508|4844|4913|4917)\d+$/.test(cardNumber)) {
result = returnMode ? '008' : 'electron';
}
// then check for Cabal cards
else if (/(^604(([23][0-9][0-9])|(400))(\d{10})$)|(^589657(\d{10})$)/.test(cardNumber)) {
result = returnMode ? '011' : 'CABAL';
}
// then check for Naranja cards
else if (/^589562(\d{10})$/.test(cardNumber)) {
result = returnMode ? '012' : 'NARANJA';
}
// then check for maestro
else if (/^(?:5[0678]\d\d|6304|6390|67\d\d)\d{8,15}$/.test(cardNumber)) {
result = returnMode ? '009' : 'maestro';
}
// then check for MIR cards ( the number starts in range 2200-2204 )
else if (/^220[0-4]\d{12}$/.test(cardNumber)) {
result = returnMode ? '010' : 'MIR';
}
//Then check for troy cards (the number starts in range 979200-979289)
else if ((/^9792[0-8][0-9]\d{10}$/.test(cardNumber))) {
result = returnMode ? '' : 'troy';
}
return result;
}
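// Examples (well-known test card numbers):
//   getCardType('4111111111111111') === 'visa'
//   getCardType('4111111111111111', 1) === '001'
//   getCardType('5500005555555559') === 'mc'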
// Grab user data from config file
var userData = config.autoCheckout.data;
// Pull the form fields
let formData = querystring.parse(await this.page.evaluate(async () => {
return $("#dwfrm_payment").serialize();
}))
// Catch error where shipping info was entered incorrectly
if (formData['dwfrm_payment_creditCard_owner'].includes("null")) {
logger.error(this.instance, "Invalid shipping info detected! Please continue manually");
return false;
}
// Fill out form data
Object.keys(formData).forEach(function (k) {
for (var userEntry in userData)
if (k.includes(userEntry))
formData[k] = userData[userEntry];
});
formData["dwfrm_payment_creditCard_type"] = getCardType(userData.creditCard_number, 1)
formData["format"] = "ajax";
// Submit payment data to adidas.com
let respJson = await this.page.evaluate(async (body, url) => {
try {
const response = await fetch(url, {
"credentials": "include",
"headers": {
"accept": "application/json, text/javascript, */*; q=0.01",
"accept-language": "en-US,en;q=0.9,fr;q=0.8",
"content-type": "application/x-www-form-urlencoded; charset=UTF-8",
"x-requested-with": "XMLHttpRequest"
},
"referrer": "https://www.adidas.com/on/demandware.store/Sites-adidas-US-Site/en_US/COSummary2-Start",
"referrerPolicy": "no-referrer-when-downgrade",
"body": body,
"method": "POST",
"mode": "cors"
});
return await response.json();
} catch (e) {
console.log(e)
return null;
}
}, querystring.stringify(formData), paymentSubmitUrl);
if (respJson != null &&
respJson.hasErrors === false &&
(typeof (respJson.fieldsToSubmit) == 'object')) {
} else {
logger.error(this.instance, "Failed to submit payment information: there was an error with your payment information!")
}
}
async cartProduct() {
async function cart(sku, size, page, baseUrl, instance, captcha) {
// Need a delay to prevent bans - driven by event handler in the future?
// await new Promise(resolve => setTimeout(resolve, 10000));
let response = await page.evaluate(async (cartUrl, baseUrl, sku, PID, size, captcha) => {
const res = await fetch(cartUrl, {
"credentials": "include",
"headers": {
"accept": "*/*",
"accept-language": "en-US,en;q=0.9,fr;q=0.8",
"content-type": "application/json",
},
"referrer": baseUrl,
"referrerPolicy": "no-referrer-when-downgrade",
"body": JSON.stringify({
captchaResponse: captcha,
displaySize: size,
productId: sku,
product_id: PID,
product_variation_sku: sku,
quantity: 1,
size: size
}),
"method": "POST",
"mode": "cors"
});
try {
if (res.status == 200) {
const json = await res.json();
return { success: true, json: json, statusCode: res.status };
}
} catch (err) { }
return { success: false, json: null, statusCode: res.status };
}, cartUrl, baseUrl, sku, PID, size, captcha);
if (response.success && response.json.cart.product_quantity != 0) {
return true;
} else if (response.json != null) {
switch (response.json.message) {
case "INVALID-CAPTCHA":
logger.info(instance, `Failed to cart shoe: invalid captcha supplied!`);
break;
default: logger.info(instance, `Failed to cart shoe: an unknown error occurred!`);
}
} else {
switch (response.statusCode) {
case 403:
logger.info(instance, `Failed to cart shoe: temporary ban occurred!`);
break;
default: logger.info(instance, `Failed to cart shoe: an unknown error occurred!`);
}
}
return false;
}
// Cart random size
if (sizesToCart.length == 0) {
logger.info(this.instance, "Choosing random size...")
// Filter out OOS sizes
var inStock = availability.filter(function (el) {
return el.availability > 0;
});
if (inStock.length == 0) {
logger.info(this.instance, "No sizes are currently in stock!")
return false;
}
var variant = inStock[Math.floor(Math.random() * inStock.length)];
return await cart(variant.sku, variant.size, this.page, baseUrl, this.instance, this.captchaSolution);
} else {
for (var size of sizesToCart) {
for (var variant of availability) {
if (variant.size == size && variant.availability > 0) {
return await cart(variant.sku, variant.size, this.page, baseUrl, this.instance, this.captchaSolution);
}
}
}
logger.info(this.instance, `Size(s) ${sizesToCart} not available`)
return false;
}
}
}
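// Minimal usage sketch (mirrors index.js; the proxy value is a placeholder):
//   const Bot = require('./browser-bot.js');
//   const bot = new Bot({ instance: 1, proxy: 'http://127.0.0.1:8888' });
//   bot.start();  // navigates, waits for splash/captcha, then carts and
//                 // checks out according to the config file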
<|start_filename|>regions.js<|end_filename|>
const countryTable = [
{ code: "AU", domain: "com.au", language: "en" },
{ code: "AT", domain: "at", language: "de" },
{ code: "BE", domain: "be", language: "fr" },
{ code: "BR", domain: "com.br", language: "pt" },
{ code: "CA", domain: "ca", language: "en" },
{ code: "CN", domain: "com.cn", language: "zh" },
{ code: "CZ", domain: "cz", language: "cs" },
{ code: "DK", domain: "dk", language: "da" },
{ code: "FI", domain: "fi", language: "fi" },
{ code: "FR", domain: "fr", language: "fr" },
{ code: "DE", domain: "de", language: "de" },
{ code: "IE", domain: "ie", language: "en" },
{ code: "IT", domain: "it", language: "it" },
{ code: "MX", domain: "mx", language: "es" },
{ code: "NL", domain: "nl", language: "nl" },
{ code: "NZ", domain: "co.nz", language: "en" },
{ code: "PH", domain: "com.ph", language: "en" },
{ code: "PL", domain: "pl", language: "pl" },
{ code: "RU", domain: "ru", language: "ru" },
{ code: "SK", domain: "sk", language: "sk" },
{ code: "ES", domain: "es", language: "es" },
{ code: "SE", domain: "se", language: "sv" },
{ code: "GB", domain: "co.uk", language: "en" },
{ code: "US", domain: "com", language: "en" }
]
module.exports = {
getRegion: function (code) {
return countryTable.filter(o => o.code.toLowerCase() == code.toLowerCase())[0]
}
}
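// Example: getRegion('us') returns { code: "US", domain: "com", language: "en" },
// which browser-bot.js expands into URLs such as
// https://www.adidas.com/on/demandware.store/Sites-adidas-US-Site/en_US/...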
<|start_filename|>index.js<|end_filename|>
const Bot = require('./browser-bot.js');
const logger = require('./logger.js');
const fs = require('fs-extra')
const path = require('path');
const rimraf = require("rimraf");
const AutoUpdater = require('auto-updater');
var config;
if (fs.existsSync(".git")) {
config = require("./dev.config.json");
} else {
config = require("./config.json");
}
// Contains running bots
var bots = [];
var needsRestart = false;
// Paths
var saveDir = 'saves'
var localWebDataDir = path.join(__dirname, "webdata")
// Generate webdata location
var chromeWebDataDir = process.platform == 'win32'
? process.env.LOCALAPPDATA + "\\Google\\Chrome\\User Data\\Default"
: (process.platform == 'darwin' ? process.env.HOME + '/Library/Application Support/Google/Chrome' : '~/.config/google-chrome')
// Load proxies
var proxies = fs.readFileSync('proxies.txt').toString().split("\n");
if (proxies[0] == '' || proxies[0] == '\r') proxies = [];
// Remove saves from the last run
rimraf.sync(saveDir);
var autoupdater = new AutoUpdater({
pathToJson: '',
autoupdate: false,
checkgit: true,
jsonhost: 'raw.githubusercontent.com',
contenthost: 'codeload.github.com',
progressDebounce: 0,
devmode: true
});
// State the events
autoupdater.on('git-clone', function () {
launchTasks();
});
autoupdater.on('check.up-to-date', function (v) {
console.info("You have the latest version: " + v);
launchTasks();
});
autoupdater.on('check.out-dated', function (v_old, v) {
console.warn("Your version is outdated. " + v_old + " of " + v);
needsRestart = true;
autoupdater.fire('download-update');
});
autoupdater.on('update.downloaded', function () {
console.log("Update downloaded and ready for install");
autoupdater.fire('extract');
});
autoupdater.on('update.not-installed', function () {
console.log("The Update was already in your folder! It's read for install");
autoupdater.fire('extract');
});
autoupdater.on('update.extracted', function () {
console.log("Update extracted successfully!");
});
autoupdater.on('download.start', function (name) {
console.log("Starting downloading: " + name);
});
autoupdater.on('download.progress', function (name, perc) {
process.stdout.write("Downloading " + perc + "% \033[0G");
});
autoupdater.on('download.end', function (name) {
console.log("Downloaded " + name);
});
autoupdater.on('download.error', function (err) {
console.error("Error when downloading: " + err);
});
autoupdater.on('end', function () {
if (needsRestart) {
var exec = require('child_process').exec,
child;
child = exec('npm i',
function (error, stdout, stderr) {
if (error !== null) {
console.log('Error: ' + error);
}
});
console.warn("Please restart the application!")
}
});
autoupdater.on('error', function (name, e) {
console.error(name, e);
});
// Check for updates
autoupdater.fire('check');
// Launches tasks
async function launchTasks() {
// Show intro
logger.intro(config.taskCount, proxies.length);
for (let index = 0; index < config.taskCount; index++) {
if (proxies.length != 0) {
bots.push(new Bot({ instance: index + 1, proxy: proxies[index % proxies.length] }));
} else {
bots.push(new Bot({ instance: index + 1 }));
}
if (config.webdata.enabled) {
if (config.webdata.path != "") {
await copyWebData(config.webdata.path + '\\Web Data', path.resolve('saves', 'chrome_' + (index + 1), "Default"));
} else {
await copyWebData(chromeWebDataDir + '\\Web Data', path.resolve('saves', 'chrome_' + (index + 1), "Default"));
}
}
setTimeout(function (i) {
bots[i].start();
}, config.startUpDelayInterval * index, index);
}
}
// Moves webdata and deletes old data
async function copyWebData(file, dir2) {
// Clear previous data dirs
// TODO: Configuration option for clearing previous sessions' data
await fs.ensureDir(dir2);
var f = path.basename(file);
var source = fs.createReadStream(file);
var dest = fs.createWriteStream(path.resolve(dir2, f));
source.pipe(dest);
source.on('error', function (err) { console.log(err); });
return true;
}; | hk1722/adidas-bot |
<|start_filename|>tests/var_recovery/src/linux/union_global/union_global.c<|end_filename|>
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <inttypes.h>
union ufoo {
uint64_t u64;
uint32_t u32;
uint16_t u16;
uint8_t u8;
uint8_t bytes[8];
};
union ufoo global_ufoo;
static void dump_union(union ufoo *f) {
printf("\t0x%" PRIx64 "\n", f->u64);
printf("\t0x%" PRIx32 "\n", f->u32);
printf("\t0x%" PRIx16 "\n", f->u16);
printf("\t0x%" PRIx8 "\n", f->u8);
printf("\t");
for(int i = 0; i < 8; i++ ) {
printf("0x%"PRIx8 " ", f->bytes[i]);
}
printf("\n");
}
int main(int argc, const char *argv[]) {
printf("Union before modifiction:\n");
memset(&global_ufoo, 0, sizeof(global_ufoo));
dump_union(&global_ufoo);
if(argc % 2 == 0) {
printf("Not adding to variables\n");
} else {
global_ufoo.u64 += argc;
global_ufoo.u32 += argc;
global_ufoo.u16 += argc;
global_ufoo.u8 += argc;
}
printf("Union after modifiction:\n");
dump_union(&global_ufoo);
return 0;
}
<|start_filename|>tests/test_suite_generator/src/linux/lodsb/main.c<|end_filename|>
#include <stdio.h>
static inline int mystrcmp(const char *cs, const char *ct) {
register int __res;
__asm__ __volatile__(
"cld\n"
"1:\n"
"lodsb\n"
"scasb\n"
"jne 2f\n"
"testb %%al,%%al\n"
"jne 1b\n"
"xorl %%eax,%%eax\n"
"jmp 3f\n"
"2:\n"
"sbbl %%eax,%%eax\n"
"orb $1,%%al\n"
"3:\n"
: "=a"(__res)
: "S"(cs), "D"(ct)
: "si", "di");
return __res;
}
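/* Annotation of the asm above: lodsb loads *cs into %al and advances %esi,
 * scasb compares %al against *ct and advances %edi. The loop repeats while
 * the bytes match and are nonzero. On a mismatch, sbbl %eax,%eax yields 0
 * or -1 from the carry flag set by scasb, and orb $1,%al forces the result
 * to +1 or -1; equal strings return 0 via the xorl path. */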
static inline char *mystrcpy(char *dest, const char *src) {
int d0, d1, d2;
__asm__ __volatile__(
"1:\n\t"
"\tlodsb\n\t"
"stosb\n\t"
"testb %%al,%%al\n\t"
"jne 1b"
: "=&S"(d0), "=&D"(d1), "=&a"(d2)
: "0"(src), "1"(dest)
: "memory");
return dest;
}
int main(void) {
int i;
char buf[] = "i am really cool too\n";
char str[] = "i am so cool........\n";
mystrcpy(buf, str);
for (i = 0; i < 13; i++) printf("%c", buf[i]);
printf("\n");
buf[13] = '\0';
if (mystrcmp("i am so cool.", buf) == 0) {
printf("The strings match\n");
} else {
printf("The strings do not match\n");
}
return 0;
}
<|start_filename|>tests/test_suite_generator/src/linux/arc4/main.cpp<|end_filename|>
#include "arc4.h"
#include <cstdio>
#include <cstring>
int main() {
const char password[] = "password";
const uint8_t data[] = "Hello, world!";
arc4_ctx_t ctx;
uint8_t buffer1[sizeof(data)] = {};
arc4_setkey(&ctx, reinterpret_cast<const uint8_t *>(password),
sizeof(password) - 1);
arc4_encrypt(&ctx, buffer1, data, sizeof(data));
uint8_t buffer2[sizeof(data)] = {};
arc4_setkey(&ctx, reinterpret_cast<const uint8_t *>(password),
sizeof(password) - 1);
arc4_decrypt(&ctx, buffer2, buffer1, sizeof(buffer1));
std::printf("%s\n", buffer2);
if (std::memcmp(data, buffer2, sizeof(data)) != 0) {
return 1;
}
return 0;
}
<|start_filename|>tests/test_suite_generator/src/linux/arc4/arc4.cpp<|end_filename|>
/* $NetBSD: arc4.c,v 1.7 2014/08/10 16:44:35 tls Exp $ */
/*
* ARC4 implementation
* A Stream Cipher Encryption Algorithm "Arcfour"
* <draft-kaukonen-cipher-arcfour-03.txt>
*/
/* This code illustrates a sample implementation
* of the Arcfour algorithm
* Copyright (c) April 29, 1997 <NAME>.
* All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that this copyright
* notice and disclaimer are retained.
*
* THIS SOFTWARE IS PROVIDED BY <NAME> AND CONTRIBUTORS ``AS
* IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL KALLE
* KAUKONEN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "arc4.h"
int arc4_ctxlen(void) { return sizeof(struct arc4_ctx); }
void arc4_setkey(void *ctxp, const uint8_t *key, uint32_t keylen) {
struct arc4_ctx *ctx = static_cast<arc4_ctx *>(ctxp);
unsigned int i, t, u, ki, si;
unsigned int *state;
state = ctx->state;
ctx->x = 0;
ctx->y = 0;
for (i = 0; i < 256; i++) state[i] = i;
ki = si = 0;
for (i = 0; i < 256; i++) {
t = state[i];
si = (si + key[ki] + t) & 0xff;
u = state[si];
state[si] = t;
state[i] = u;
if (++ki >= keylen) ki = 0;
}
}
void arc4_encrypt(void *ctxp, uint8_t *dst, const uint8_t *src, int len) {
struct arc4_ctx *ctx = static_cast<arc4_ctx *>(ctxp);
unsigned int x, y, sx, sy;
unsigned int *state;
const unsigned char *endsrc;
state = ctx->state;
x = ctx->x;
y = ctx->y;
for (endsrc = src + len; src != endsrc; src++, dst++) {
x = (x + 1) & 0xff;
sx = state[x];
y = (sx + y) & 0xff;
state[x] = sy = state[y];
state[y] = sx;
*dst = *src ^ state[(sx + sy) & 0xff];
}
ctx->x = x;
ctx->y = y;
}
void arc4_stream(void *ctxp, uint8_t *dst, int len) {
struct arc4_ctx *ctx = static_cast<arc4_ctx *>(ctxp);
unsigned int x, y, sx, sy;
unsigned int *state;
const unsigned char *enddst;
state = ctx->state;
x = ctx->x;
y = ctx->y;
for (enddst = dst + len; dst != enddst; dst++) {
x = (x + 1) & 0xff;
sx = state[x];
y = (sx + y) & 0xff;
state[x] = sy = state[y];
state[y] = sx;
*dst = state[(sx + sy) & 0xff];
}
ctx->x = x;
ctx->y = y;
}
void arc4_decrypt(void *ctxp, uint8_t *dst, const uint8_t *src, int len) {
arc4_encrypt(ctxp, dst, src, len);
}
<|start_filename|>tests/var_recovery/src/linux/multi_global/multi_global.c<|end_filename|>
#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>
uint64_t global_u64 = 0x3133731337ULL;
uint32_t global_u32 = 0x31337;
uint16_t global_u16 = 0x1337;
uint8_t global_u8 = 0x37;
static void dump_globals() {
printf("\t0x%" PRIx64 "\n", global_u64);
printf("\t0x%" PRIx32 "\n", global_u32);
printf("\t0x%" PRIx16 "\n", global_u16);
printf("\t0x%" PRIx8 "\n", global_u8);
}
int main(int argc, const char *argv[]) {
printf("Globals lists before modifiction:\n");
dump_globals();
if(argc % 2 == 0) {
printf("Not adding to global variable\n");
} else {
global_u64 += argc;
global_u32 += argc;
global_u16 += argc;
global_u8 += argc;
}
printf("Globals lists after modifiction:\n");
dump_globals();
return 0;
}
<|start_filename|>tests/test_suite_generator/src/linux/arc4/arc4.h<|end_filename|>
/* $NetBSD: arc4.h,v 1.5 2014/08/10 16:44:35 tls Exp $ */
/*
* ARC4 implementation
* A Stream Cipher Encryption Algorithm "Arcfour"
* <draft-kaukonen-cipher-arcfour-03.txt>
*/
/* This code illustrates a sample implementation
* of the Arcfour algorithm
* Copyright (c) April 29, 1997 <NAME>.
* All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that this copyright
* notice and disclaimer are retained.
*
* THIS SOFTWARE IS PROVIDED BY <NAME> AND CONTRIBUTORS ``AS
* IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL KALLE
* KAUKONEN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _CRYPTO_ARC4_H_
#define _CRYPTO_ARC4_H_
typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
typedef struct arc4_ctx {
unsigned int x;
unsigned int y;
unsigned int state[256];
/* was unsigned char, changed to int for performance -- onoe */
} arc4_ctx_t;
int arc4_ctxlen(void);
void arc4_setkey(void *, const uint8_t *, unsigned int);
void arc4_encrypt(void *, uint8_t *, const uint8_t *, int);
void arc4_decrypt(void *, uint8_t *, const uint8_t *, int);
void arc4_stream(void *, uint8_t *, int);
#endif /* _CRYPTO_ARC4_H_ */
<|start_filename|>tests/var_recovery/src/linux/array_struct_global/array_struct_global.c<|end_filename|>
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <inttypes.h>
struct foo {
uint64_t u64;
uint32_t u32;
uint16_t u16;
uint8_t u8;
};
struct foo global_foos[64];
static void dump_struct(struct foo foos[64]) {
for(int i = 0; i < 64; i++) {
printf("\t0x%" PRIx64 "\n", foos[i].u64);
printf("\t0x%" PRIx32 "\n", foos[i].u32);
printf("\t0x%" PRIx16 "\n", foos[i].u16);
printf("\t0x%" PRIx8 "\n", foos[i].u8);
}
}
int main(int argc, const char *argv[]) {
printf("Array of struct before modifiction:\n");
memset(&global_foos, 0, sizeof(global_foos));
dump_struct(global_foos);
if(argc % 2 == 0) {
printf("Not adding to variables\n");
} else {
for(int i = 0; i < 64; i ++ ) {
global_foos[i].u64 += argc + i;
global_foos[i].u32 += argc + i;
global_foos[i].u16 += argc + i;
global_foos[i].u8 += argc + i;
}
}
printf("Array of struct after modifiction:\n");
dump_struct(global_foos);
return 0;
}
<|start_filename|>tests/var_recovery/src/linux/struct_global/struct_global.c<|end_filename|>
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <inttypes.h>
struct foo {
uint64_t u64;
uint32_t u32;
uint16_t u16;
uint8_t u8;
};
struct foo global_foo;
static void dump_struct(struct foo *f) {
printf("\t0x%" PRIx64 "\n", f->u64);
printf("\t0x%" PRIx32 "\n", f->u32);
printf("\t0x%" PRIx16 "\n", f->u16);
printf("\t0x%" PRIx8 "\n", f->u8);
}
int main(int argc, const char *argv[]) {
printf("Struct before modifiction:\n");
memset(&global_foo, 0, sizeof(global_foo));
dump_struct(&global_foo);
if(argc % 2 == 0) {
printf("Not adding to variables\n");
} else {
global_foo.u64 += argc;
global_foo.u32 += argc;
global_foo.u16 += argc;
global_foo.u8 += argc;
}
printf("Struct after modifiction:\n");
dump_struct(&global_foo);
return 0;
}
<|start_filename|>tools/setup_launcher.bat<|end_filename|>
@echo off
call :main %1
exit /B %ERRORLEVEL%
:main
if "%~1" == "" (
echo The installation prefix parameter is missing
exit /B 1
)
setlocal enableextensions
set install_folder=%1
set PYTHONPATH=%install_folder%\Lib\site-packages
set install_log=%TEMP%\%RANDOM%
if not exist "%PYTHONPATH%\\" (
echo Creating %PYTHONPATH%
md "%PYTHONPATH%"
if errorlevel 1 (
echo Failed to create the site-packages folder in %PYTHONPATH%
exit /B 1
)
)
echo Installing mcsema-disass
echo - Destination folder: %install_folder%
echo - PYTHONPATH: %PYTHONPATH%
python setup.py install -f --prefix="%install_folder%" > %install_log% 2>&1
if errorlevel 1 (
echo Failed to install the Python package to %install_folder%. Error output follows
type %install_log%
del %install_log%
endlocal
exit /B 1
)
endlocal
exit /B 0
<|start_filename|>tests/var_recovery/src/linux/single_global/single_global.c<|end_filename|>
#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>
uint64_t global_u64 = 0x31337;
int main(int argc, const char *argv[]) {
if(argc % 2 == 0) {
printf("Not adding to global variable\n");
} else {
global_u64 += argc;
}
printf("Global is: 0x%" PRIx64 "\n", global_u64);
return 0;
}
<|start_filename|>tests/var_recovery/src/linux/array_global/array_global.c<|end_filename|>
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <inttypes.h>
uint64_t global_array[64];
static void dump_array(uint64_t global_array[64]) {
for(int i = 0; i < 64; i++) {
if(i % 8 == 0) {
printf("\n");
}
printf("\t0x%" PRIx64, global_array[i]);
}
printf("\n");
}
int main(int argc, const char *argv[]) {
printf("Array before modifiction:\n");
memset(&global_array, 0, sizeof(global_array));
dump_array(global_array);
if(argc % 2 == 0) {
printf("Not adding to variables\n");
} else {
if(argc <= 0) {
argc = 1;
}
for(int i = 0; i < 64; i += argc) {
global_array[i] += argc;
}
}
printf("Array after modifiction:\n");
dump_array(global_array);
return 0;
}
<|start_filename|>examples/Maze/Maze.c<|end_filename|>
/*
* Copyright (c) 2018 Trail of Bits, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* It's a maze!
* Use a,s,d,w to move "through" it.
*/
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
/* Dimensions of the Maze */
enum {
kWidth = 11,
kHeight = 7
};
/* Hard-coded maze */
char maze[kHeight][kWidth] = {
{'+', '-', '+', '-', '-', '-', '+', '-', '-', '-', '+'},
{'|', ' ', '|', ' ', ' ', ' ', ' ', ' ', '|', '#', '|'},
{'|', ' ', '|', ' ', '-', '-', '+', ' ', '|', ' ', '|'},
{'|', ' ', '|', ' ', ' ', ' ', '|', ' ', '|', ' ', '|'},
{'|', ' ', '+', '-', '-', ' ', '|', ' ', '|', ' ', '|'},
{'|', ' ', ' ', ' ', ' ', ' ', '|', ' ', ' ', ' ', '|'},
{'+', '-', '-', '-', '-', '-', '+', '-', '-', '-', '+'},
};
/**
* Draw the maze state in the screen!
*/
void draw(void) {
int i, j;
for (i = 0; i < kHeight; i++) {
for (j = 0; j < kWidth; j++) {
printf("%c", maze[i][j]);
}
printf("\n");
}
printf("\n");
}
enum {
kMaxNumPlayerMoves = 28
};
/**
* The main function
*/
int main(int argc, char *argv[]) {
int x, y; /* Player position */
int ox, oy; /* Old player position */
int i = 0; /* Iteration number */
char program[kMaxNumPlayerMoves];
/* Initial position */
x = 1;
y = 1;
maze[y][x] = 'X';
/* Print some info. */
printf("Maze dimensions: %dx%d\n", kWidth, kHeight);
printf("Player position: %dx%d\n", x, y);
printf("Iteration no. %d\n", i);
printf("Program the player moves with a sequence of 'w', 's', 'a' and 'd'\n");
printf("Try to reach the price(#)!\n");
/* Draw the maze */
draw();
/* Read the directions 'program' to execute... */
read(STDIN_FILENO, program, kMaxNumPlayerMoves);
/* Iterate and run 'program'. */
while (i < kMaxNumPlayerMoves) {
/* Save old player position */
ox = x;
oy = y;
/* Move player position depending on the actual command */
switch (program[i]) {
case 'w':
y--;
break;
case 's':
y++;
break;
case 'a':
x--;
break;
case 'd':
x++;
break;
default:
printf("Wrong command, only w,s,a,d are accepted!)\n");
printf("You lose!\n");
exit(EXIT_FAILURE);
}
/* If hit the price, You Win!! */
if (maze[y][x] == '#') {
printf("You win!\n");
printf("Your solution <%42s>\n", program);
exit(EXIT_SUCCESS);
}
/* If something is wrong do not advance. */
if (maze[y][x] != ' '
&& !((y == 2 && maze[y][x] == '|' && x > 0 && x < kWidth))) {
x = ox;
y = oy;
}
/* Print new maze state and info... */
printf("Player position: %dx%d\n", x, y);
printf("Iteration no. %d. Action: %c. %s\n", i, program[i],
((ox == x && oy == y) ? "Blocked!" : ""));
/* If crashed to a wall! Exit, you lose */
if (ox == x && oy == y) {
printf("You lose\n");
exit(EXIT_FAILURE);
}
/* put the player on the maze... */
maze[y][x] = 'X';
/* draw it */
draw();
/* increment iteration */
i++;
/* wait for the human to keep up */
sleep(1);
}
/* You couldn't make it! You lose! */
printf("You lose\n");
return EXIT_FAILURE;
}
<|start_filename|>tests/test_suite_generator/src/linux/pthread/main.c<|end_filename|>
/*
* Copyright (c) 2017 Trail of Bits, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <stdio.h>
#include <stdlib.h>
#include <pthread.h>
static pthread_cond_t gCond;
static pthread_mutex_t gLock;
static int gFlag = 0;
__thread int tls_data1;
__thread int tls_data2;
typedef struct {
int data1;
int data2;
} thread_parm_t;
void bar() {
printf("bar(), tls data=%d %d\n", tls_data1, tls_data2);
return;
}
void foo() {
printf("foo(), tls data=%d %d\n", tls_data1, tls_data2);
bar();
}
void *theThread(void *parm)
{
thread_parm_t *gData;
pthread_mutex_lock(&gLock);
pthread_cond_wait(&gCond, &gLock);
gFlag += 1;
gData = (thread_parm_t*)parm;
tls_data1 = gData->data1;
tls_data2 = gData->data2;
foo();
pthread_mutex_unlock(&gLock);
return NULL;
}
int main(int argc, char **argv) {
int rc=0, i;
pthread_t thread[2];
thread_parm_t gData[2];
printf("Create threads\n");
pthread_mutex_init(&gLock, NULL);
pthread_cond_init(&gCond, NULL);
for (i=0; i < 2; i++) {
gData[i].data1 = i;
gData[i].data2 = (i+1)*2;
rc = pthread_create(&thread[i], NULL, theThread, &gData[i]);
if (rc) {
printf("Failed with %d at pthread_create()", rc);
exit(1);
}
}
// synchronize output. this gets printed before threads print
printf("Wait for the threads to complete, and release their resources\n");
fflush(stdout);
while (gFlag < 2) {
pthread_cond_signal(&gCond);
}
for (i=0; i < 2; i++) {
rc = pthread_join(thread[i], NULL);
if (rc) {
printf("Failed with %d at pthread_join()", rc);
exit(1);
}
}
printf("Main completed\n");
return 0;
}
| abhishekvasishtb/mcsema |