```html
<form action="/heroes" method="post">
  <input type="text" name="hero" />
  <input type="text" name="superpower" />
  <button type="submit" is="ajax-submit" data-target="hero-container">
    add new item
  </button>
</form>

<div id="hero-container"></div>
```

```javascript
class AjaxSubmitButton extends HTMLButtonElement {
  connectedCallback() {
    this.addEventListener("click", async (event) => {
      event.preventDefault();
      let html = await ajaxFormSubmit();
      document.getElementById(this.dataset.target).innerHTML = html;
    });
  }
}

customElements.define("ajax-submit", AjaxSubmitButton, {
  extends: "button",
});
```
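The `ajaxFormSubmit()` helper is only referenced, not shown, in this excerpt. A minimal sketch of what such a helper could look like, assuming it posts the surrounding form with `fetch` and resolves to the response body; the function body and the `X-Requested-With: ajax` header are assumptions (the header value matches what the controller further down checks):

```javascript
// Hypothetical helper: submit a form via fetch and return the response HTML.
async function ajaxFormSubmit(form = document.querySelector("form")) {
  const response = await fetch(form.action, {
    method: form.method,
    body: new URLSearchParams(new FormData(form)),
    headers: { "X-Requested-With": "ajax" },
  });
  return response.text();
}
```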
extends",[53,7280,7281],{"class":59}," HTMLButtonElement",[53,7283,6176],{"class":82},[53,7285,7286,7289],{"class":55,"line":86},[53,7287,7288],{"class":59}," connectedCallback",[53,7290,7291],{"class":82},"() {\n",[53,7293,7294,7297,7299,7302,7304,7307,7309,7312,7315,7318,7320,7323],{"class":55,"line":126},[53,7295,7296],{"class":89}," this",[53,7298,986],{"class":82},[53,7300,7301],{"class":59},"addEventListener",[53,7303,1067],{"class":82},[53,7305,7306],{"class":63},"\"click\"",[53,7308,99],{"class":82},[53,7310,7311],{"class":389},"async",[53,7313,7314],{"class":82}," (",[53,7316,7317],{"class":6186},"event",[53,7319,1665],{"class":82},[53,7321,7322],{"class":389},"=>",[53,7324,6176],{"class":82},[53,7326,7327,7330,7333],{"class":55,"line":163},[53,7328,7329],{"class":82}," event.",[53,7331,7332],{"class":59},"preventDefault",[53,7334,7335],{"class":82},"();\n",[53,7337,7338,7341,7344,7346,7349,7352],{"class":55,"line":186},[53,7339,7340],{"class":389}," let",[53,7342,7343],{"class":82}," html ",[53,7345,390],{"class":389},[53,7347,7348],{"class":389}," await",[53,7350,7351],{"class":59}," ajaxFormSubmit",[53,7353,7335],{"class":82},[53,7355,7356,7359,7362,7364,7367,7370,7372],{"class":55,"line":221},[53,7357,7358],{"class":82}," document.",[53,7360,7361],{"class":59},"getElementById",[53,7363,1067],{"class":82},[53,7365,7366],{"class":89},"this",[53,7368,7369],{"class":82},".dataset.target).innerHTML ",[53,7371,390],{"class":389},[53,7373,7374],{"class":82}," html;\n",[53,7376,7377],{"class":55,"line":242},[53,7378,7379],{"class":82}," });\n",[53,7381,7382],{"class":55,"line":273},[53,7383,7384],{"class":82}," }\n",[53,7386,7387],{"class":55,"line":279},[53,7388,282],{"class":82},[53,7390,7391],{"class":55,"line":496},[53,7392,500],{"emptyLinePlaceholder":499},[53,7394,7395,7398,7401,7403,7405],{"class":55,"line":503},[53,7396,7397],{"class":82},"customElements.",[53,7399,7400],{"class":59},"define",[53,7402,1067],{"class":82},[53,7404,7203],{"class":63},[53,7406,7407],{"class":82},", AjaxSubmitButton, {\n",[53,7409,7410,7413,7416],{"class":55,"line":509},[53,7411,7412],{"class":82}," extends: ",[53,7414,7415],{"class":63},"\"button\"",[53,7417,2252],{"class":82},[53,7419,7420],{"class":55,"line":515},[53,7421,7422],{"class":82},"});\n",[18,7424,7425,7426,7429],{},"Der Vorteil des Custom-Elements gegenüber jQuery (oder sonstiger eigener Implementierung) ist, dass der Browser sich\ndarum kümmert, unser JavaScript mit dem HTML zu verbinden und eine Instanz des ",[50,7427,7428],{},"AjaxSubmitButtons"," zu erstellen.",[18,7431,7432,7433,7436,7437,7442],{},"Kommen zur Laufzeit weitere ",[50,7434,7435],{},"submit"," Buttons mit diesem Attribut hinzu, werden sie vom Browser automatisch mit unserem\ngewünschten Verhalten erweitert. Ist das JavaScript aus irgendwelchen Gründen nicht verfügbar funktioniert weiterhin das\ngute alte HTML Formular mit komplettem Seitenreload. Wir verbessern unsere Anwendung mit jedem weiteren\nTechnologie-Layer, der zur Verfügung\nsteht, ",[585,7438,7441],{"href":7439,"rel":7440},"https://developer.mozilla.org/de/docs/Glossary/Progressive_Enhancement",[589],"Progressive Enhancement"," genannt. HTML\nbeschreibt den Inhalt, CSS macht es bunt, und zu guter Letzt verbessern wir die Benutzererfahrung mit JavaScript.",[18,7444,7445],{},"Das Backend wird mit diesem Ansatz auch nicht komplizierter oder gar komplexer. Im Controller müssen wir erkennen, dass\nes sich um einen Ajax Request handelt. 
In that case an HTML fragment is rendered, otherwise the complete page is rendered:

```java
@PostMapping(value = "/heroes")
public String addSuperhero(
        @RequestParam String hero,
        @RequestParam String superpower,
        @RequestHeader(name = "X-Requested-With", defaultValue = "") String requestedWith,
        Model model
) {

    model.addAttribute("hero", hero);
    model.addAttribute("superpower", superpower);

    if ("ajax".equals(requestedWith)) {
        return "fragments/hero-fragment :: hero-fragment";
    }

    return "full-page-including-the-hero-fragment";
}
```
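The returned view name `fragments/hero-fragment :: hero-fragment` looks like Thymeleaf's fragment selector syntax. The template itself is not part of the original post; a minimal sketch of what `fragments/hero-fragment.html` could look like, with markup and model attributes assumed for illustration:

```html
<!-- fragments/hero-fragment.html (hypothetical sketch, assuming Thymeleaf) -->
<div th:fragment="hero-fragment" xmlns:th="http://www.thymeleaf.org">
  <p>
    <span th:text="${hero}">hero</span>:
    <span th:text="${superpower}">superpower</span>
  </p>
</div>
```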
## On the way to a single page application

By now we use the ajax-submit component in a lot of places in our application. So far it has proven to be robust and simple. Everyone on the team has understood how to use it and how it works. We do not need a runtime à la ReactJS, we do not have to understand hooks, we do not need a complex build setup. We do not have to figure out how to integrate Vue components, and we do not have to worry about client-side-only functionality.

In short: we build our frontend without a modern JavaScript framework and are happy (nevertheless).

Admittedly, the progressive enhancement mindset is challenging. Thinking about how requirements can be solved without JavaScript is something our industry may have unlearned. In the past, colleagues who mostly work on the backend have asked me how one builds a frontend these days. Do you take Angular or React? At that point nobody had given the requirements any thought; at least they were not the lead-up to the technology question.

### Challenges that might (yet) come

- **Progressive enhancement mindset**

Once our application has taken on a certain character of a modern single page application, that will of course become the default (in spirit). Here we have to keep thinking about the baseline and not jump straight to JavaScript-only solutions.

- **History handling**

Back and forward after certain actions is not a requirement so far. Should it become one, it is not rocket science either. Changing the URL after JavaScript actions with the [history API](https://developer.mozilla.org/en-US/docs/Web/API/History) is within the realm of possibility 😉

- **State**

Updating state in several places in the browser after certain actions. Here, I think, we have to weigh whether libraries earn their place or whether [Custom Events](https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent/CustomEvent) are sufficient.
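Both of the last two points could stay in the same lightweight, framework-free style. A rough sketch of what that might look like; the URL, event name and element ids are illustrative and not taken from the project:

```javascript
// 1) History handling: record a new entry after an ajax action ...
function rememberState(url, state = {}) {
  history.pushState(state, "", url);
}

// ... and react when the user navigates back or forward.
window.addEventListener("popstate", (event) => {
  console.log("restore view for", document.location.pathname, event.state);
});

// 2) State: broadcast a custom event so other widgets can update themselves.
function announceHeroAdded(total) {
  document.dispatchEvent(new CustomEvent("hero-added", { detail: { total } }));
}

document.addEventListener("hero-added", (event) => {
  const badge = document.getElementById("hero-count");
  if (badge) badge.textContent = event.detail.total;
});
```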
# KanDDDinsky 2019 in Berlin

The KanDDDinsky conference 2019 took place in a hotel near Alexanderplatz in Berlin. One day before the actual conference there was an additional workshop day hosted by DDD Europe, offering three full-day workshops that dug deeper into DDD topics in small groups. The conference itself consisted partly of one-hour talks and partly of two-hour hands-on workshops. In between there were plenty of snacks during the coffee breaks, as well as lunch and dinner. With around 200 attendees everything was quite manageable and never crowded. The conference rooms were located directly in the hotel, so we could start right after breakfast. Below you will find our impressions of the workshops and talks that stuck with us, plus a closing verdict.

**Workshop: Thomas Coopman – Event Storming**

The DDD workshop "EventStorming Introduction" by Thomas Coopman took place on the first day in collaboration with DDD Europe. Its goal was to teach the participants the basics of event storming and to show them how to introduce the technique in their own company. After a short introduction to the problem domain the participants tried the process themselves, under constant guidance by Thomas Coopman. The problem domain was a short-term car rental (comparable to the e-scooters in some cities). First the classic *big picture* was worked out, and the model was later refined and extended step by step. Afterwards the happy path was explicitly played through in an "explicit" and a "reverse" walkthrough.
It was interesting that Coopman had adapted Brandolini's process and developed his own language: already during the big picture phase he worked not with aggregates and repositories but with commands that cause events and with rules that kick in. This approach helped to find user stories and acceptance tests faster and to run example mapping more efficiently. To ease the introduction in companies, Coopman intervened constantly and passed on valuable tips from his extensive experience, be it for participants ("domain experts are there to be questioned") or for facilitators ("at least 1 m of paper per person", "don't buy super stickies"). Thomas shared many tricks in the workshop; it would have been interesting to see, on an additional day, how Coopman would have completed the process and made the jump to the domain model.

**Workshop: Marco Heimeshoff, Roman Sachse – Why are words, how do they mean?**

One of the hands-on workshops during the two conference days was called "Why are words, how do they mean?" and was moderated by Marco Heimeshoff and Roman Sachse. From the description you could only tell that it had to revolve around Domain-Driven Design (DDD) somehow. I hoped for a practical workshop with little theory, and indeed, after a short introduction the practical part started right away. The task was to develop the domain for a hotel business. The room was split into several small groups, each of which applied one event modelling method to one sub-area of the hotel business (for example event storming or domain storytelling). My group chose the area "managing the hotel rooms". This was about things like how a cot gets to the hotel room at a customer's request, or how the rooms are cleaned. Using domain storytelling we formed simple sentences consisting of actors, activities and work objects, and soon a network of interconnected workflows emerged. On the one hand a language became visible; on the other hand we spotted places that were harder to model than others.

After about 45 minutes the second part of the workshop started, which was about turning the domain we had discussed into code. One of the mentors operated the keyboard and the room full of participants decided in which direction to develop. We started implementing a sub-area that one of the groups had modelled before. The mentors responded dynamically to remarks and questions from the audience. Whenever a problem came up, the solution was explained right away and implemented as an example.

In the first part of the workshop the participants could help shape a form of event modelling and immerse themselves in the domain. The second part explained abstract DDD concepts vividly and put them into practice. You could shape the content and participate actively. Unfortunately the workshop lasted only two hours and the mentors could not go into all the models of the individual teams.
All in all it was a successful workshop that would certainly also have worked well in a longer format.

**Talk: Roman Sachse – Is Maybe an Option**

In his talk "Is Maybe an Option" Roman Sachse tackled a fundamental problem that is usually silently worked around in DDD: *nullability in the domain model.* Be it the absence of a database field, the return value of a function that is supposed to parse something unparsable, or fields that have to be left empty because of object-relational mapping: using short F# snippets, Sachse explained for many real-world problems which strategies can be used to approach them. He went into the data types *Option* and *Maybe* in depth. Even though the insights cannot be transferred one-to-one to a Java or .NET project, Roman Sachse provided fresh food for thought with approaches that can also be implemented with generics and optionals.

**Talk: Dennis Doomen – A practical introduction to DDD, CQRS…**

Domain-Driven Design, Command Query Responsibility Segregation and event sourcing are paradigms that fit together extremely well and are usually introduced into a project jointly. In his talk "A practical introduction to DDD, CQRS and Event Sourcing" Dennis Doomen first covered the topics in a quick sweep and then worked through "real" problems: "What happens when I have to adapt the read model?" "How do I separate the components cleanly?" "What are alternatives I can replace certain components with?" The talk delivered not only foundations for beginners but also tips for common problems. The slides are certainly a good basis for applying these paradigms in large, change-prone projects.

**Talk: Michael Plöd – Pitching DDD to the management**

In C-level management circles DDD is still often dismissed as esoteric, expensive nonsense, and trying to score with event storming and the like usually does not help. Yet many of the points that currently matter to management, agility for example, can be found in the ideas of Domain-Driven Design. In his talk Michael Plöd laid out analytically how exactly the paradigm can help achieve the managers' goals, and showed a talent for bringing management and development onto a common denominator, finding lines of argument and reaching the goal with small tricks. Although many ideas from the talk can be used verbatim or slightly adapted in the next meeting, it was about more than that, namely the mindset with which the different viewpoints and goals of the leading and executing parts of a company can be brought together.

**Talk: Philipp Krenn – Building Distributed Systems in Distributed Teams**

Philipp Krenn's talk was about distributed work at Elastic. The classic office is the exception there; the employees are spread across the globe and essentially work from home.
He showed how this works, which advantages it brings (such as great flexibility) and which disadvantages (the remote release party, for example, tends to be rather small). The most important takeaway for me was that it is easier when the entire team is distributed, as opposed to individual distributed team members (or members who are distributed only on individual days). This matches the experience from our own projects, where many people have a classic desk in the office and a few work remotely.

**Party session – So You Want to be a Rockstar Developer?**

Dylan Beattie's party session took place on the first evening of the conference. For fun he designed a programming language made up of song lyrics and published a specification on GitHub. Very entertainingly, he showed how the project took on a life of its own in the open source community. Soon there was not only the specification but also the possibility to actually write programs in "Rockstar". With humorous examples and a guitar interlude, the talk was the perfect ending to the first day.

KanDDDinsky is a rather small conference. The talk rooms are not crowded and it is easy to work along in the workshops. The speakers attend the conference themselves and are available for questions and discussions during the breaks; it is not unlikely to meet one of them as a participant in a workshop or talk. The content of the talks was often not very technical, and the share of DDD topics was smaller than expected. For DDD newcomers an introductory session at the beginning of the conference would have been helpful. For our workshop attendees the additional day in Berlin was worth it: the sessions with a small audience conveyed knowledge in a focused way and provided new impulses.
# Limit of active devices during Android device tests

Some time ago we decided to expand our pool of Android devices. These are used in our Continuous Integration (CI) pipeline. But running our tests we noticed a peculiar behaviour: only 3 devices would execute the test suite while the rest would wait. As soon as a device finished, one of the waiting devices would proceed.

As a result our device tests would run roughly twice as long. We value fast build times and rapid feedback while still running large test suites, so we had to fix this problem.

*(Diagram: two "Devices" timelines for four devices running the test execution. In the first, only three devices run the suite in parallel and the fourth starts only after another one has finished; in the second, all four devices start simultaneously.)*
style="display:inline-block;text-align:inherit;text-decoration:inherit;white-space:normal;">Devices</div></div></foreignObject><text x="24" y="12" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">Devices</text></switch></g></g></svg>",[18,7748,7749],{},"Investigating every sensible gradle switch or task configuration we still were unsuccessful. Then, frustrated, we\ndecided to dig into the source code of the Google Android build tools project and understand just what was going on.",[18,7751,7752],{},"There, we realized that Google decided to use the Java Common Pool to trigger the device tests in parallel. This pool is\nconfigured to use n = CoreCount – 1 threads per default. This observation fit perfectly, as we were currently running\ndevice tests on a Dual Core with Hyperthreading (4 “virtual” cores).",[18,7754,7755,7756,7761,7762],{},"The default number of threads for\nthe ",[585,7757,7760],{"href":7758,"rel":7759},"https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ForkJoinPool.html",[589],"Common Pool"," can be configured\nwith the JVM option ",[50,7763,7764],{},"java.util.concurrent.ForkJoinPool.common.parallelism",[18,7766,7767],{},"Gradle is triggered by our jenkins agent. We decided to “fix” our gradle installation by updating our installation-wide\ngradle-config:",[18,7769,7770],{},[27,7771,7772],{},"/var/lib/jenkins/.gradle/gradle.properties:",[18,7774,7775],{},"`#Allow more simultaneous device tests",[18,7777,7778],{},"systemProp.java.util.concurrent.ForkJoinPool.common.parallelism=10`",[18,7780,7781],{},"Because the main load of the test execution should be carried by the devices itself we expected no negative consequences\nin this scenario. And indeed, setting this option to a higher value did allow all device to test in parallel, saving us\nvaluable time.",{"title":48,"searchDepth":86,"depth":86,"links":7783},[],[613,5834,614,997],"2019-10-08T14:24:36","Some time ago we decided to expand our pool of Android devices. These are used in our Continuous Integration (CI)\\npipeline. But running our tests we noticed a peculiar behaviour: Only 3 devices would execute the test suite while the\\nrest would wait. As soon as a device finished one of the waiting devices would proceed.","https://synyx.de/blog/limit-of-active-devices-during-android-device-tests/",{},"/blog/limit-of-active-devices-during-android-device-tests",{"title":7728,"description":7738},"blog/limit-of-active-devices-during-android-device-tests",[],"Some time ago we decided to expand our pool of Android devices. These are used in our Continuous Integration (CI) pipeline. But running our tests we noticed a peculiar behaviour: Only 3 devices would execute the suite while the rest would wait. 
# Code with Attitude – Part 1: Values

At synyx we recently came forward with a new tagline, trying to express the general mindset that is distinctive for the way we work with clients, the software community and each other. The line is:

**Code with Attitude**

The instant I heard this simple sentence for the first time, it triggered a multitude of associations, memories and emotions that I connect with the word "attitude" in conjunction with software development. One after another, all the situations from different projects during my time at synyx came to mind where a certain expression of attitude helped on the way to success. This tells me that our marketing dudes did an excellent job expressing the synyx mindset in one powerful and simple statement, and also that I have *a lot* to say about it. This is why I want to share my interpretation of "Code with Attitude" in this blog series.

## Part 1 – Courage to be true to your values

As a human being, everybody has a set of values and moral standards that they deem important and that greatly influence where and how they live and work. Now you are probably thinking of something like "Don't code guidance software for nuclear weapons" or "If your manager tells you to tweak the emission control software for the new car model, say no!". Although these are obvious examples, this is not quite what I mean, as I have never been in an extreme situation like this, nor have 99% of software developers. What I mean is a set of values that influence your day-to-day work, like the dedication to quality and sustainability, respectful communication or keeping an open mind.

Behind the term "your values" lies more complexity than you might expect. It does not only mean your personal, individual values but also those [of your employer](https://synyx.de/wer/werte/) and, if your employer is a service provider, even the values of your clients. If you are working for your client, it is in their best interest that you take their sense of what is right or wrong into account for the actions you take or don't take. Ideally those three value systems should mostly overlap.
If they are heavily misaligned, that is a situation you should not be in.

![Synyx Werte: Nachhaltigkeit, Soziale Verantwortung, Auf dem Weg zur Exzellenz, Leidenschaft, Eigenverantwortung, offen & ehrlich](https://media.synyx.de/uploads/2019/03/synyx_values-768x432.png)

The synyx values: sustainability, social responsibility, pursuit of excellence, passion, individuality, self-responsibility, honesty & candor.

Becoming aware of these values is only the first step. The second one is to venerate and defend them, which often requires one of the core agile values: *courage*.

In fact I am convinced that it is an integral part of our job to bring up the courage to act on our own and our client's values. But what does this mean in practice?

There are countless examples of situations where that courage is needed. There is the business guy from the "quality is key" company who wants to cut software tests to deliver faster to the clients. There are multi-team environments in the "collaborative mind" company where every team keeps to itself instead of collaborating, feeding the "us and them" mindset. There is the project manager in the "customer centric" company conducting a design-up-front waterfall big-bang-release project without the feedback of a single customer. There is the whatever-guy from the "security first" company not caring about data encryption. There is the developer in the "open source" company claiming ownership of "his" part of the code within the project. There are quality engineers in the "work smarter, not harder" company clicking through thousands of pages of manual testing plans. If you witness any of those or similar situations at your employer or client, it is your obligation as a rational and professional person to address the issue and try to improve it. More often than not I observe external contractors or employees resigning themselves to the circumstances, thinking "Fuck it, I'm doing my job and getting paid for what I am told". In my opinion this is the opposite of "doing your job". Your job is to do what is best for your employer/client/project, and that means bringing up the courage to question established practices, to actively address problems, to call out bullshit if necessary.

![Scrum Werte](https://media.synyx.de/uploads/2019/03/agile_values.png)

For many people this might seem counter-intuitive. The conservative employer thinks he needs people who do as they are told. The conservative employee keeps to himself, afraid to offend anybody and fearing the repercussions. In fact the opposite is true, and both would highly benefit from "showing more attitude". As an employer you should embrace employees and external contractors who question things and who challenge everything against your values and their own. This is the only way to improve, to produce sustainable products, to come closer to excellence and to create the solutions you really want! As an employee, questioning things and standing up for your values instantly makes your day job more exciting and meaningful!
It gives you the chance to do something you really care about of your own accord, increasing your motivation and quality of life and consequently improving the quality of your work results!

For me this is one of the core parts of "Code with Attitude" and one of the most basic mindsets that we live and breathe here at synyx. It is the attitude of having the strong intention to understand our clients' value systems and needs, and the courage to actively act in their best interest while incorporating our own values for our clients' good.

During my work in different projects this mindset has helped me and our clients countless times. I don't know how often we had to defend our values of sustainability, quality and excellence by emphasizing the importance of things like code quality, automated tests, slack time and refactorings to keep a project afloat or even rescue it from certain failure. There were times when clients needed to be reminded of their own core values like customer friendliness, innovativeness, data minimization or accessibility. Otherwise their projects would have gone in (for them!) completely undesirable directions.

*It is part of our job to get to know the client's values and remind them!*

One time I actually witnessed a project manager from a larger corporation with a software department of hundreds of developers claim that his company is *not* a software company and that therefore the software does not have to be developed on a professional level. The guy in a room of ten people who spoke up was me, the only guy not employed by that company. I love being that guy; it is a great part of what makes my job meaningful.

The bottom line is that to be of maximum value for you and your client, you have to be aware of the complex value system mentioned above and you have to bring up the courage to act on it. To say "no" at the right times, to say "yes, but" at the right times, to encourage at the right times. Code with attitude!

[@indyarni](https://twitter.com/indyarni)
# How my development environment is set up

At the synyx camp two weeks ago we talked, among other things, about the setup of our development environments. In the following I would like to describe briefly how mine is set up and which programs I would no longer want to miss in my everyday work.

Briefly covered:

- [Terminal](https://synyx.de/blog/2018-11-09-entwicklungsumgebung/?page=1#terminal)
- [Git](https://synyx.de/blog/2018-11-09-entwicklungsumgebung/?page=1#git)
- [Programs](https://synyx.de/blog/2018-11-09-entwicklungsumgebung/?page=1#programme)

## Terminal

I work on a MacBook and have used iTerm2 from the start. Unfortunately I no longer know why. Maybe the built-in Terminal did not offer a dark theme back then, or maybe it was not possible to split a terminal window horizontally. Both are possible by now. No other iTerm2 features I use come to mind right now…

Far more interesting, in my opinion, is the question of which shell is installed and how it is personalized. I use [zsh](https://github.com/zsh-users/zsh) and, of course(?), [oh-my-zsh](https://github.com/robbyrussell/oh-my-zsh) to adjust the look & feel of my shell and to extend it with plugins.

![Code Beispiel](https://media.synyx.de/uploads/2019/03/prompt-1-768x578.png)

I have the following oh-my-zsh plugins installed (a sketch of how they are wired up follows after the list):
- **z**
  - keeps a list of paths you `cd` into on the shell and matches the characters you type against the most frequently visited path
  - e.g. for me the command `z dot` takes me to `/Users/seber/projects/git/dotfiles`
- **[zsh-better-npm-completion](https://github.com/lukechilds/zsh-better-npm-completion)**
  - press Tab to get project-specific npm task suggestions
  - e.g. in the screenshot the tasks `lint`, `lint:watch`, `start` and `start:watch` are suggested
- **[zsh-autosuggestions](https://github.com/zsh-users/zsh-autosuggestions)**
  - start typing and the command is completed in grey
  - press the right arrow key and Enter to execute it
- **[zsh-syntax-highlighting](https://github.com/zsh-users/zsh-syntax-highlighting)**
  - highlights the program to be executed in colour (useful to see whether I mistyped)
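How exactly these plugins are activated is not shown in the post; with oh-my-zsh it usually boils down to a `plugins` line in `~/.zshrc`, roughly like this (the install paths in the comments are assumptions):

```bash
# ~/.zshrc (sketch): oh-my-zsh loads everything listed in $plugins
# from $ZSH/plugins or $ZSH_CUSTOM/plugins.
export ZSH="$HOME/.oh-my-zsh"

plugins=(
  z                          # ships with oh-my-zsh
  zsh-better-npm-completion  # cloned into $ZSH_CUSTOM/plugins
  zsh-autosuggestions        # cloned into $ZSH_CUSTOM/plugins
  zsh-syntax-highlighting    # should be loaded last
)

source "$ZSH/oh-my-zsh.sh"
```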
Besides nodeJS several other programming languages are supported, unfortunately Java is not one of them.

Which sections the spaceship prompt shows, and in which order, can be configured:

```shell
SPACESHIP_PROMPT_ORDER=(
  time          # Time stamps section
  user          # Username section
  dir           # Current directory section
  git           # Git section (git_branch + git_status)
  node          # Node.js section
  docker        # Docker section
  exec_time     # Execution time
  line_sep      # Line break
  jobs          # Background jobs indicator
  exit_code     # Exit code section
  char          # Prompt character
)
```

The same goes for the symbol a line should start with. I wanted to keep using the character of the pure prompt and therefore overrode the default:

```shell
SPACESHIP_CHAR_SYMBOL='❯ '
```
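For illustration, this is roughly how the pieces above fit together in `~/.zshrc`. This is only a minimal sketch assuming oh-my-zsh's standard plugin/theme mechanism, with the non-bundled plugins cloned into `$ZSH_CUSTOM/plugins` and spaceship installed as a theme:

```shell
# ~/.zshrc (sketch)
export ZSH="$HOME/.oh-my-zsh"

# spaceship-prompt installed as an oh-my-zsh theme
ZSH_THEME="spaceship"

# keep the pure prompt character instead of spaceship's default
SPACESHIP_CHAR_SYMBOL='❯ '

# plugins from the list above (the non-bundled ones live in $ZSH_CUSTOM/plugins)
plugins=(
  z
  zsh-better-npm-completion
  zsh-autosuggestions
  zsh-syntax-highlighting
)

source "$ZSH/oh-my-zsh.sh"
```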
## Git

I use git mostly on the command line. I have never really warmed up to graphical tools. Starting with `gitk` and `git gui` on a Linux machine, I stumbled across [git-cola](https://git-cola.github.io/), [SourceTree](https://www.sourcetreeapp.com/), [GitKraken](https://www.gitkraken.com/), [Fork](https://git-fork.com/) and a few others. But all the clicking and the context switch between IDE and $tool always bothered me. Visually I did not find anything appealing either, except GitKraken and, more recently, Fork (yes, that matters to me, as you may have noticed earlier with the dark theme for the terminal 🙄). What bothered me most, though, was that rebasing was either missing or cumbersome to handle. For me that is fastest on the command line with my own git aliases.

My most frequent commands are:

- `git up`
  - fetch the current state from `remote` with a rebase
  - delete all local branches that have already been merged
- `git cm "jetzt funktionierts wirklich!!!1elf"`
  - commit all changes with the given commit message (including newly added files)
- `git amend`
  - add all changes to the last HEAD commit (excluding newly added files)
- `git ll`
  - print a list of all commit hashes and messages of the current branch starting from HEAD
  - the length of the list can be limited with e.g. `-42`
- `git fix` and `git ri`
  - create a `fixup` commit and clean up the commits with an interactive rebase
  - the workflow then looks like this:

```shell
❯ git ll -5
a52747d (HEAD -> master, origin/master, origin/HEAD) foo
b858f05 bar
4954b3d baz
782d959 bum
2b1c7c6 buff
❯ git add .
❯ git fix 782d959
❯ git ri 782d959^
```

Custom git aliases can be stored in the global git config at `~/.gitconfig`.

Since I normally rebase my merge requests onto master before approving them for merging, I am very happy about git's `rerere` feature. If it is enabled, git remembers during a rebase how a conflict was resolved. If the same conflict comes up again, git knows how to resolve it automatically.

```ini
[rerere]
    enabled = true
[alias]
    s = status
    st = stash
    co = checkout
    cob = checkout -b
    fix = commit --fixup
    ri = rebase -i --autosquash
    up = !git pull --rebase --prune && git bclean
    cm = !git add -A && git commit -m
    amend = commit -a --amend
    ll = !git --no-pager log --oneline --decorate
    bclean = "!f() { branches=$(git branch --merged ${1-master} | grep -v " ${1-master}$"); [ -z \"$branches\" ] || git branch -d$branches; }; f"
```

## Programs

Other programs I would no longer want to miss in my daily work are:

- [homebrew](https://brew.sh/) *(command line)*
  - pretty much THE package manager for OSX
  - install programs with `brew install FOO`. It does not get any faster than that!
- [direnv](https://github.com/direnv/direnv) *(command line)*
  - drop an `.envrc` file into a directory and define there e.g. the Java or NodeJS version, or a different git e-mail address in case you want to use one other than the global one for commits in this project (see the sketch after this list)
- [jq](https://stedolan.github.io/jq/) *(command line)*
  - JSON processor for the command line
Thanks for reading (☞゚ヮ゚)☞ (smiley inserted with clipy snippets)

# Implementing At Least Once Delivery With RabbitMQ and Spring's RabbitTemplate

## Message Delivery Characteristics

First some theory about delivery semantics in messaging systems. When a system wants to communicate via a message broker, the developer needs a clear understanding of the delivery semantics. First of all one needs to know if and how often a message will be delivered to the broker (and potential consumers):

- **At most once** – the message is delivered at most once, but possibly not at all.
- **At least once** – the message is guaranteed to be delivered but can be delivered multiple times.
- **Exactly once** – the message is guaranteed to be delivered exactly once.

The second point that is – at least at first sight – important for message consumers is the ordering of messages. Ordering in a messaging context means that messages arrive at the consumer in the same order as they have been sent by a given producer.

Even if some brokers claim to guarantee "exactly once in order delivery", it is recommended not to take the order of incoming messages for granted, because a) things can get messy in distributed systems and b) you might not want to rely on the semantics of your current broker too much, since you'll end up with another broker in the future.

## AMQP / RabbitMQ Send Semantics

In my current customer project we are using RabbitMQ as a message broker to implement an event-driven architecture. Since we are building our services and frontend apps with Spring Boot, we can use the very convenient RabbitTemplate of Spring AMQP.
In this article I might use both terms\ninterchangeably.",[18,8925,8926],{},"Once the correct connection paramters for your RabbitMQ paramters are provided to your applicaiton context your\napplication will automatically connect to the RabbitMQ server. When your code needs to send messages via AMQP you just\nneed to autowire an instance of RabbitTemplate and call the send method like this:",[43,8928,8932],{"className":8929,"code":8930,"language":8931,"meta":48,"style":48},"language-abap shiki shiki-themes github-light github-dark","rabbitTemplate.send(\"your_exchange\", \"a_routing_key\", yourMessageObject);\n","abap",[50,8933,8934],{"__ignoreMap":48},[53,8935,8936],{"class":55,"line":56},[53,8937,8930],{},[18,8939,8940],{},"If the RabbitMQ server is available the call just returns and the message seems to be delivered. However, as you are\nleaving your JVM’s process you enter the realm of distributed systems. Virtually everything can go wrong now. 😉",[18,8942,8943,8944,8947,8948],{},"So, comparing the above mentioned delivery semantics with the observations from our RabbitMQ usage in Spring Boot,\nRabbitMQ guarantees ",[573,8945,8946],{},"at least once delivery"," if not configured elsewise. Also, there is only one small cornercase where\nmessage ordering is\nguaranteed: ",[585,8949,8950],{"href":8950,"rel":8951},"https://www.rabbitmq.com/semantics.html#ordering",[589],[2207,8953,8955],{"id":8954},"the-dirty-details","The Dirty Details",[18,8957,8958],{},"When looking at the send semantics of the RabbitTemplate in Spring Boot we can seperate into two topics:",[649,8960,8962],{"id":8961},"passing-the-notwork","Passing The Notwork",[18,8964,8965],{},"The first is about reaching the RabbitMQ server at all. So, the network connection can be shaky or not available. Or the\nhost is reachable but the RabbitMQ service on the host is down. In that case the RabbitTemplate will throw an exception\nto the caller. From this point on the caller code can handle the failed message delivery conciously.",[18,8967,8968],{},"From an API interaction point of view the message can be considered as delivered (to the broker) if no exception is\nthrown by the send call. So this is quite comparable to an RestTemplate / HttpClient call. However, as you’ll see later\nthings can still go wrong even if the call returned successfully.",[649,8970,8972],{"id":8971},"delivery-inside-the-message-broker","Delivery Inside the Message Broker",[18,8974,8975],{},"When you use an AMQP message broker things are a bit different than when interacting via HTTP. This brings us to the\nsecond topic: Transferring the message to the broker is only the first part of a longer process. The broker tries to\nroute messages to the queues that are bound to the exchange on which the message is sent.",[18,8977,8978,8979,8982],{},"As mentiond – the broker ",[573,8980,8981],{},"tries"," to route the messages. If no queue is bound the message is just rushing through the\nexchange and is lost in the end. Another case is that if the target exchange does not exist (for whatever reason) the\nbroker returns an HTTP like error code: 404. On the client side the send() method call has already returned. The\nRabbitMQ template has no chance to notify it’s caller of the failed delivery. The only thing left is writing an error to\nthe application’s log.",[18,8984,8985],{},"There are probably more cases that match this problem space. One can categorize them like “received message but could\nnot deliver it to any queues”. 
The important point for all problems in this category is that once the\nRabbitTemplate.send() method returned the client code cannot be sure whether the message has really been delivered.",[2207,8987,8989],{"id":8988},"choose-your-weapon","Choose Your Weapon",[18,8991,8992],{},"For most of your scenarios this behavior is perfectly OK. At least with many of our cases we emit event messages in a\n“fire and forget” style. The code is written without knowledge of and excpectation towards any consumers.\nInconsistencies caused by not delivered messages need to be detected and handled seperatly.",[18,8994,8995,8996,986],{},"There are however scenarios where you really want to be sure that a given message has a) reached the message broker and\nb) been routed – the above mentioned ",[573,8997,8998],{},"at least onće delivery",[18,9000,9001],{},"In that case the AMQP protocol, RabbitMQ and Spring’s RabbitTemplate offer some (configuration) tools to help you.",[649,9003,9005],{"id":9004},"enable-and-handle-publisher-confirms-callbacks","Enable and Handle Publisher-Confirms Callbacks",[18,9007,9008,9009],{},"The publisher confirms callback helps the application developer to have his code to be notified when the RabbitMQ server\nhas received a message and has delivered it to the desired exchange. Technical details can be found\nhere: ",[585,9010,9011],{"href":9011,"rel":9012},"https://www.rabbitmq.com/confirms.html",[589],[18,9014,9015],{},"To enable the publisher confirms callbacks two things must be done:",[577,9017,9018,9028],{},[580,9019,9020,9021,9024,9025,9027],{},"The property ",[50,9022,9023],{},"spring.rabbitmq.publisher-confirms"," must be set to ",[50,9026,408],{},". Please notice that this property has global\neffect. Even if there is more than one RabbitTemplate configured.",[580,9029,9030],{},"Since the RabbitMQ server cannot return the cofirmation synchonously some callback code has to be registered so it can\nbe called once the confirm has arrived. The callback can be registered during the Creation of the RabbitTemplate.\nInstead of using the autoconfigured RabbitTemplate one needs to configure the RabbitTemplate manually and then provide\na new Instance of RabbitTemplate.CofirmCallback to the RabbitTemplate::setConfirmCallback method. One of the arguments\nof the confirm method of that class is the boolean ‘ack’ if it’s ‘true’ the publisher has confirmed that the message\nhas been received. 
If it’s false something went wrong.",[43,9032,9034],{"className":288,"code":9033,"language":290,"meta":48,"style":48},"template.setConfirmCallback(new RabbitTemplate.ConfirmCallback() {\n\n @Override\n public void confirm(CorrelationData correlationData, boolean ack, String cause) {\n if(ack) {\n applicationEventPublisher.publish(new PublishConfirmedEvent(correlationData.getId());\n } else {\n applicationEventPublisher.publish(new PublishNotConfirmedEvent(correlationData.getId(), cause);\n }\n }\n});\n",[50,9035,9036,9041,9045,9050,9055,9060,9065,9070,9075,9079,9083],{"__ignoreMap":48},[53,9037,9038],{"class":55,"line":56},[53,9039,9040],{},"template.setConfirmCallback(new RabbitTemplate.ConfirmCallback() {\n",[53,9042,9043],{"class":55,"line":86},[53,9044,500],{"emptyLinePlaceholder":499},[53,9046,9047],{"class":55,"line":126},[53,9048,9049],{}," @Override\n",[53,9051,9052],{"class":55,"line":163},[53,9053,9054],{}," public void confirm(CorrelationData correlationData, boolean ack, String cause) {\n",[53,9056,9057],{"class":55,"line":186},[53,9058,9059],{}," if(ack) {\n",[53,9061,9062],{"class":55,"line":221},[53,9063,9064],{}," applicationEventPublisher.publish(new PublishConfirmedEvent(correlationData.getId());\n",[53,9066,9067],{"class":55,"line":242},[53,9068,9069],{}," } else {\n",[53,9071,9072],{"class":55,"line":273},[53,9073,9074],{}," applicationEventPublisher.publish(new PublishNotConfirmedEvent(correlationData.getId(), cause);\n",[53,9076,9077],{"class":55,"line":279},[53,9078,3242],{},[53,9080,9081],{"class":55,"line":496},[53,9082,860],{},[53,9084,9085],{"class":55,"line":503},[53,9086,7422],{},[649,9088,9090],{"id":9089},"handle-return-callbacks","Handle Return Callbacks",[18,9092,9093],{},"In contrast to the Publisher Confirms Callback the Return Callback is not activated by a configuration property but is\ndirectly set on the RabbitTemplate object with setMandatory(true). So this has only effect for the one instance of the\nRabbitTemplate. If you want to have different behavior to this respect you can configure several RabbitTemplates with\ndifferent Qualifiers.",[18,9095,9096],{},"By activating the mandatory flag the sent messages indicate that the sender expects the message to be routed to a queue.\nIf the message is not routed the RabbitMQ server needs to return the whole message to the sender with a reply code.",[43,9098,9100],{"className":288,"code":9099,"language":290,"meta":48,"style":48},"template.setMandatory(true);\ntemplate.setReturnCallback(new RabbitTemplate.ReturnCallback() {\n\n @Override\n public void returnedMessage(Message message, int replyCode, String replyText, String exchange,\n String routingKey) {\n\n if (replyCode == AMQP.NO_ROUTE) {\n applicationEventPublisher.publish(new NoRouteEvent(message.getMessageId(),\n replyText, replyCode, exchange, routingKey);\n } else if(...) 
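For completeness, a sketch of where these two pieces could live. The property line is the one mentioned above (set globally, e.g. in application.properties); the configuration class, bean name and the lambda body are only illustrative and correspond to the callback shown in the previous listing:

```java
// application.properties (global effect):
// spring.rabbitmq.publisher-confirms=true

import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class RabbitConfig {

    @Bean
    RabbitTemplate confirmingRabbitTemplate(ConnectionFactory connectionFactory) {
        RabbitTemplate template = new RabbitTemplate(connectionFactory);
        // register the confirm callback, e.g. the one from the listing above
        template.setConfirmCallback((correlationData, ack, cause) -> {
            // react to ack / nack, for example by publishing an application event
        });
        return template;
    }
}
```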
### Handle Return Callbacks

In contrast to the publisher confirms callback, the return callback is not activated by a configuration property but is set directly on the RabbitTemplate object with setMandatory(true). So this only has an effect for this one instance of the RabbitTemplate. If you want different behavior in this respect, you can configure several RabbitTemplates with different qualifiers.

By activating the mandatory flag, the sent messages indicate that the sender expects the message to be routed to a queue. If the message is not routed, the RabbitMQ server has to return the whole message to the sender together with a reply code.

```java
template.setMandatory(true);
template.setReturnCallback(new RabbitTemplate.ReturnCallback() {

    @Override
    public void returnedMessage(Message message, int replyCode, String replyText, String exchange,
                                String routingKey) {

        if (replyCode == AMQP.NO_ROUTE) {
            applicationEventPublisher.publishEvent(new NoRouteEvent(message.getMessageProperties().getMessageId(),
                    replyText, replyCode, exchange, routingKey));
        } else if (...) {
            // more code for other cases goes here
        }
    }
});
```

Message delivery/handling inside the message broker can fail for several reasons. The constants in the com.rabbitmq.client.AMQP interface give a hint at what can go wrong. Besides obvious codes like NOT_FOUND or ACCESS_REFUSED, the NO_ROUTE code is special, since the other errors are mostly caused by configuration problems. NO_ROUTE is returned when no queue (with a matching routing key) is bound to the target exchange and the mandatory flag is true. It is the message producer's way to express its wish for "at least once" semantics.

Also check the official documentation: <https://www.rabbitmq.com/reliability.html#producer>
When you get a publisher confirms callback the initially\nset ",[573,9193,9190],{}," is now the ",[573,9196,9197],{},"correllationData.Id"," and if you get a return callback you have the initial message at hand\nwhere you can directly access the ",[573,9200,9190],{},[18,9202,9203,9204,9207,9208,986],{},"If you have different parts of your application communicating via RabbitMQ and want to be able to distinguish the\nmessages from each other it helps to prefix the id with some meaning ful token like ",[573,9205,9206],{},"orders"," or ",[573,9209,9210],{},"notifications",[2207,9212,9214],{"id":9213},"handling-callbacks-from-the-rabbittemplate","Handling Callbacks From the RabbitTemplate",[18,9216,9217],{},"It turned out to be a useful pattern to publish meaningful Application Events from the above mentioned callbacks and\nthen listen to these in some other application code. This helps to avoid tangle between your different domains and the\npotential central RabbitMQ configuration.",[18,9219,9220],{},"Also, you can easily handle the events in an async manner if needed. Be aware that this callbacks will potentially arive\nvery quickly and also do not rely on correct ordering. It helps to implement a state machine for handling async events\nwithout fixed order in a reliable manner.",[18,9222,9223],{},"also, to be more resilient against crashes (of the JVM) between delivery attempts you might want to persist your\ndelivery attempts (in a database).",[607,9225,989],{},{"title":48,"searchDepth":86,"depth":86,"links":9227},[9228,9229,9230,9234,9239],{"id":8880,"depth":86,"text":8881},{"id":8913,"depth":86,"text":8914},{"id":8954,"depth":86,"text":8955,"children":9231},[9232,9233],{"id":8961,"depth":126,"text":8962},{"id":8971,"depth":126,"text":8972},{"id":8988,"depth":86,"text":8989,"children":9235},[9236,9237,9238],{"id":9004,"depth":126,"text":9005},{"id":9089,"depth":126,"text":9090},{"id":9183,"depth":126,"text":9184},{"id":9213,"depth":86,"text":9214},[613,614],"2018-07-28T14:44:10","https://synyx.de/blog/implementing-at-least-once-delivery-with-rabbitmq-and-springs-rabbittemplate/",{},"/blog/implementing-at-least-once-delivery-with-rabbitmq-and-springs-rabbittemplate",{"title":8870,"description":48},"blog/implementing-at-least-once-delivery-with-rabbitmq-and-springs-rabbittemplate",[9248,9249,9250,9251,9252,1010,9253],"amqp","boot","distributed","messaging","rabbitmq","systems","Message Delivery Characteristics First some theory about delivery semantics in messaging systems. When a system wants to communicate via a message broker the developer needs a clear understanding of the delivery semantics. 
## Handling Callbacks From the RabbitTemplate

It turned out to be a useful pattern to publish meaningful application events from the above mentioned callbacks and then listen to these in some other application code. This helps to avoid tangle between your different domains and the potentially central RabbitMQ configuration.

Also, you can easily handle the events in an async manner if needed. Be aware that these callbacks will potentially arrive very quickly; also, do not rely on correct ordering. It helps to implement a state machine for handling async events without a fixed order in a reliable manner.

Also, to be more resilient against crashes (of the JVM) between delivery attempts, you might want to persist your delivery attempts (in a database).
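A sketch of such a listener. The event classes are the ones invented in the callback listings above; `@Async` additionally requires `@EnableAsync` in some configuration class, and the listener bodies only hint at what a persisted, state-machine-like handling could do:

```java
import org.springframework.context.event.EventListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

@Component
class DeliveryTrackingListener {

    @Async
    @EventListener
    public void on(PublishConfirmedEvent event) {
        // mark the delivery attempt identified by the event's id as confirmed
    }

    @Async
    @EventListener
    public void on(PublishNotConfirmedEvent event) {
        // mark the attempt as failed and schedule a retry;
        // persisting this state keeps it safe across JVM crashes
    }
}
```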
# Code Coverage with significance

## 83,9% – what does that even mean?

Conversations about unit test coverage usually sound like this:

A: "What's your coverage?"

B: "About 83,9%"

C: "Meh. Solid."

A: "Solid? That's incredibly high!"

D: "Ours is 40% but we have a lot of generated code so it's still high."

This shows that the perception of code coverage is highly subjective and most of the time does not have the informative value that a precise percentage indicator like "83,9%" suggests. As soon as you have a portion of code in your project that is not covered on purpose (like generated code) but is still included in the coverage analysis, you lose the significance of the coverage indicator. You're only able to tell that **some** of your code is covered, and maybe you can derive from the percentage that your coverage is "high" or "low". You can tell that your coverage is going up or down, but not even that is dependable, as it does not take into account how much of the new code is purposefully not tested. How many tests are missing? What is the risk that you take on every new release? You cannot provide a satisfactory answer to these questions.

There are divergent opinions on what "good" code coverage is, varying somewhere between 60% and 90% in my experience. I claim: *Your unit test coverage is good when **100% of the code you want to test** is covered by unit tests.* And in my opinion this is measurable and doable and makes the coverage indicator significant again.

## Purposefully untested code

In every larger project there are some portions of code that you do not want to test with unit tests. The first step towards a meaningful code coverage indicator is to identify these portions. Then coverage tools like [Jacoco](https://www.eclemma.org/jacoco/) and reporting tools like [SonarQube](https://www.sonarqube.org/) can help you exclude these portions from the coverage report, usually by defining exclusion patterns like in the picture below.

![Code Coverage Exclusions](https://media.synyx.de/uploads/2019/04/exclusions.png)

Following are some examples of code you don't want to test and how you can exclude it to shave off some percent of your coverage's insignificance.

### Code of 3rd party libraries

Just stating the obvious. As changes to this code are not in your cognizance, neither are its tests. In a normal project setup, 3rd party libraries are not included in the coverage report by default.

### Test code

Usually code that is intended for testing purposes only is separated from the production code, which makes it easily excludable. There may be some exceptions like mock objects or testing infrastructure that live near the production code, in which case you should specifically exclude them from your coverage report.

### Generated code

Changes in this code are done by a code generator. You should assume that the generated code is correct when the generator works correctly. That said, if you wrote the generator yourself, you obviously still have to test the generator. Often the exclusion of generated code is easy, as it is usually located in a separate package and you can define a simple exclusion pattern for it.

Sometimes it is a bit more complicated, for example when you use a convenience framework like [Lombok](https://projectlombok.org/) in Java that generates accessors, constructors and the like for you. It results in bytecode where handwritten and generated code are present within the same class, which makes it nearly impossible to exclude only the generated methods. Most of the time Lombok is used in objects like DTOs, JPA entity classes etc. that only hold some property fields and no business logic. One possible solution here is to declare these whole classes as "not to be tested" and make them easily identifiable with a consistent naming convention, or by moving them into similar packages, so you can use patterns like `*Dto.*` or `**/dto/**` to exclude them completely.

![Code example](https://media.synyx.de/uploads/2018/06/lombok_fail.png)
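What such exclusion patterns can look like in practice, for example as a SonarQube analysis property, is sketched below. The patterns only mirror the examples from this post and need to be adapted to your project layout:

```properties
# sonar-project.properties (sketch)
# exclude purposefully untested code from the coverage calculation only
sonar.coverage.exclusions=**/dto/**,**/*Dto.java,**/*Config.java
```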
A little off topic: when you don't use Lombok and you write your getters, setters and constructors/builders yourself, that does not mean you have to unit test them explicitly. When all of your business code is unit tested, then all needed getters, setters and constructors should be tested implicitly and show up as covered in the coverage report. If they don't, it means that either the tests for your business code are not complete or that the getters/setters are not actually used and you should just delete them. Unfortunately things like equals() and hashCode() are a whole different story because of their high cyclomatic complexity.

### Code better covered by other types of tests

A good example of code that is better tested by non-unit tests are repository classes that access your database. Writing unit tests for them is possible but cumbersome, because they usually use a lot of framework API that has to be mocked. Also, repository classes typically don't contain much logic besides database access, so unit tests don't test that much. In my opinion it is better to write tests that actually integrate with a (more or less) real database and do without unit tests here.

Another example are classes like Spring configurations annotated with @Configuration. They are not intended to be tested by unit tests. Their purpose is to construct a working application context, which is better checked by good integration tests. This is why I usually exclude `**/*Config.*` from the unit test coverage report.

## The new meaning of 83,9%

When you put a little effort into defining the correct exclusions for your code coverage, you are rewarded with an actually meaningful coverage percentage. It means that when you have 83,9% code coverage, you are actually missing 16,1% of the unit tests that you should write – that's a valuable piece of information! It is also possible that you can actually reach 100% unit test coverage! How cool is that?

Well, to be honest – realistically it is still nearly impossible to reach 100% in a larger project. There are always things like private constructors to prevent instantiation, or code paths that cannot be reached in tests because some static framework dependencies cannot be mocked, and probably a dozen other reasons that prevent you from providing reasonable unit tests in some weird cases. But if that leads to "only" 98% test coverage, at least you know that exactly 2% of your production code carries the risk of breaking without you noticing, and you can consciously accept, assess and communicate that risk.
That’s incredibly high!” D: “Ours is 40% but we have a lot of generated code so it’s still high.” This shows that the perception of code coverage is highly subjective and most of the time does not have the informative value that a precise percentage indicator like '","nDo5Wxhzv_lZrfVYpMfwdgPVRYj486SqZEOYOsmqy5o",{"id":9421,"title":9422,"author":9423,"body":9424,"category":9590,"date":9591,"description":9592,"extension":617,"link":9593,"meta":9594,"navigation":499,"path":9595,"seo":9596,"slug":9428,"stem":9597,"tags":9598,"teaser":9602,"__hash__":9603},"blog/blog/breakout-session-how-to-prototype-your-enterprise-project-hackathon-like.md","Breakout Session – how to prototype your enterprise project hackathon-like",[7799],{"type":11,"value":9425,"toc":9583},[9426,9429,9432,9438,9442,9449,9452,9456,9459,9462,9465,9468,9473,9476,9479,9482,9488,9492,9495,9501,9504,9507,9510,9513,9516,9519,9525,9529,9532,9552,9558,9563,9566,9569,9573,9580],[14,9427,9422],{"id":9428},"breakout-session-how-to-prototype-your-enterprise-project-hackathon-like",[18,9430,9431],{},"This is the story of my team creating something awesome within one day. It begins in November of 2017 at “Hack your\nOffice”, a 24-hour hackathon hosted in cooperation by my employer synyx and our customer dm-drogerie markt. Although\nit was an excellent hackathon, this is not the day I am refering to but it was on this day when the idea was born.\nSeveral of my team members from dm where participating in the hackathon, even Matthäus – one of our product owners –\njoined us. He didn’t contribute anything to the code but he was absorbed in the electrifying atmosphere of everybody\nbeing excited to hack something together. The amazing thing about hackathons is that everybody has the intrinsic\nmotivation to be creative and to work hard to produce something awesome in the short time that is available, while\nhaving fun! Matthäus formed the vision to experience something similar with our own, regular development team from the\ndm office.",[18,9433,9434],{},[2223,9435],{"alt":9436,"src":9437},"Logo Hack Your Office","https://media.synyx.de/uploads/2019/04/HACKYOUROFFICE-768x1024.jpg",[2207,9439,9441],{"id":9440},"the-vision","The Vision",[18,9443,9444,9445,9448],{},"The vision was plain and simple: Create a working POC-like product that ",[27,9446,9447],{},"adds real value and runs in production","\nwithin a single day while working in a hackathon-like atmosphere.",[18,9450,9451],{},"On this day we would not work in our usual office but together in a remote location, focussing as a team on this one\ngoal alone. We called it “breakout session”. Luckily we had a suitable use case in the pipeline. The digital receipt\nsystem of dm (“e-Bon”), that was reaching its end-of-life in 2018 for several reasons, had to be reimplemented. We\ndefined the ambitious goal that within a day we should be able to walk downstairs to the real dm store with our phone,\nactually buy something and see the genuine receipt created by the check-out counter displayed in our dm customer\naccount on our phone. Everything with production systems and production data.",[2207,9453,9455],{"id":9454},"preparation-for-bonbon","Preparation for BonBon",[18,9457,9458],{},"Although it was intended as a one-day thing we didn’t want to go in blindly. 
Some preparation was necessary to reduce\nthe risk of failure in our one-day adventure.",[18,9460,9461],{},"First we made up a small concept about basic things like where the data had to come from, what system our product will\nbe running on, which systems would have to talk to each other. The actual business use case was worked out in more\ndetail by our product owners.",[18,9463,9464],{},"We knew we would be developing a new microservice so we needed a system to run it on. At dm there is a provisioning\nplatform in place that can pull up a fully configured virtual machine cluster on multiple stages including firewall\nrules, load balancing and everything within half an hour. However we did this beforehand because experience tells us\nthat this really cool technology doesn’t always run smoothly on first try due to its massive complexity.",[18,9466,9467],{},"The receipt data from the ~9000 checkout counters of all dm stores in europe is stored in real time on some central\nbackend system so we prepared a route piping the data in real time into a topic on one of our Apache Kafka clusters. We\nlimited the data to the last two days, which should be more than enough for our POC.",[18,9469,9470],{},[2223,9471],{"alt":7972,"src":9472},"https://media.synyx.de/uploads/2019/04/json.jpg",[18,9474,9475],{},"We assembled the team for our breakout session having cross-functionality in mind as we had to master tasks in backend\ndevelopment, data processing, operations and mobile development on that day. Our regular development team (usually in\ncharge of the customer backend of dm) formed the backbone of the breakout team. We already incorporate backend expertise\nand extensive knowledge in operations and many areas of data processing thanks to our devops culture. Unfortunately we\nare a bit thin on frontend and mobile development so we asked colleagues from the mobile team to join us. One guy from\nanalytics helped us setting up Kafka and the server operations team was (as always) on stand by in the office and\ninformed about our endeavour in case anything complicated would go wrong with the infrastructure.",[18,9477,9478],{},"Finally we needed a location to retreat to. One of my teammates arranged three rooms at the office of his employer\ndiva-e Netpioneer, which is beautifully located at the city park in Karlsruhe – quickly accessible by bike, train or\ncar for all participants. He also made reservations for lunch at the nearby pizza place – one more thing of importance\nthat we would not have to bother about on the breakout day.",[18,9480,9481],{},"The discussion about the upcoming solution’s name was held some day during lunch. We called it BonBon.",[18,9483,9484],{},[2223,9485],{"alt":9486,"src":9487},"Bonbon","https://media.synyx.de/uploads/2019/04/bonbon-768x512.jpg",[2207,9489,9491],{"id":9490},"the-session","The Session",[18,9493,9494],{},"Finally the day arrived on a Wednesday in March. Everbody was hyped about it, though some of us a bit more sceptically\nhyped. Ten developers, two product owners, two interested guests from the business department and one scrum master were\ngathering in the kitchen at diva-e, having the first of countless coffees and enjoying breakfast pretzels eager to\nabuse our keyboards for some dirty hacking. 
Our product owners started the session at 9:00 with a short kick-off\nintroducing the details of the use case and we quickly forged the requirements into small user stories that would\neventually add up to the desired POC.",[18,9496,9497],{},[2223,9498],{"alt":9499,"src":9500},"Whiteboard mit Post-its","https://media.synyx.de/uploads/2019/04/board_gimp2.jpg",[18,9502,9503],{},"Twenty minutes in the user stories were ready and the signal to commence the hacking was given. Suddenly the excitement\nunloaded in a stream of productivity! Small teams were vibrantly working to put the pieces together. Creating\nrepositories, initializing a Spring Boot app, testing out infrastructure all around within the first few minutes! Thanks\nto modern development tools the results came in quickly. Within one hour the first API was useable in a freshly created\nBackend Service. Shortly after the new born iPhone app was capable to authenticate users at the checkout counter via\nQR-code. Before lunch the Kafka data trickeled in at the backend. A rudimentary build and release process was in place.\nThis didn’t feel like work, it was the hackathon atmosphere transferred to our everyday activity.",[18,9505,9506],{},"The mode we were working in was mainly in small teams of 2-3 developers tackling the different tasks in pair\nprogramming. Every hour we did a quick stand up to sync our progress. Small successes were celebrated, problems\nannounced to find a fast solution using the creative power of the whole team. The product owners where always within\nreach, researching business details, answering arising questions and giving feedback on the progress. Our scrum master\nwas scurrying around the team during development enabling communication, removing impediments, moderating stand-ups,\ngiving impulses.",[18,9508,9509],{},"Unfortunately the lunch break did not work out that well. Visiting a pizza place with a group of 15 unexpectedly turned\nout to take a lot longer than anticipated. A huge chunk of our precious time and focus was lost to some delicious slices\nof Italian culinary art.",[18,9511,9512],{},"During the time between lunch and the contemplated end at 17:00 we managed to develop almost everything that was\nnecessary to complete our goal. There was one problem left with processing the huge amount of real time data and one\nconnectivity problem between the app and the prod backend. Some team members committed to two more hours of problem\nsolving which payed off big time at the end of the day!",[18,9514,9515],{},"Around 19:00 a colleague went downstairs to the local dm store and got some apple-cinnamon cereal. He went to the\ncounter and held his phone to the QR Code scanner to let the cash identify his account. After payment he opened the\nBonBon app and the receipt of the purchase was displayed with article description, product pictures, prices and\neverything. Achievement unlocked!",[18,9517,9518],{},"Unfortunately we missed the opportunity to take a picture of this moment but the receipt in the final app prototype\nlooked like this:",[18,9520,9521],{},[2223,9522],{"alt":9523,"src":9524},"App-Beispiel","https://media.synyx.de/uploads/2018/04/app.jpg",[2207,9526,9528],{"id":9527},"hacking-around-problems","Hacking around problems",[18,9530,9531],{},"To enable this quick success we had to make our hands a little dirty by cutting corners on otherwise indispensable\npractices of our everyday work. (Except security, of course. Access to the APIs and the data was secured the whole\ntime.) 
The whole day we were running in full-on prototype-POC mode. We agreed in advance that all produced code had to\nwork quickly and didn’t have to be beautiful or readable at all. Tests were omitted from the beginning. Copy-paste from\nother projects or Stackoverflow for quick results was encouraged. Upcoming obstacles were circumnavigated with the\nquickest workaround instead of looking for the “correct” solution:",[577,9533,9534,9537,9540,9543,9546,9549],{},[580,9535,9536],{},"Connectivity from the app to the BonBon production system was created last minute with some obscure temporary routing\nthrough the release gateway into the DMZ.",[580,9538,9539],{},"Deployment was done by copying the jar artifact to the production vm “on foot”.",[580,9541,9542],{},"The receipt data was stored in memory – without backup. We just left out the persistence layer to save time.",[580,9544,9545],{},"The huge amount of data accumulated in 2 days hit us unexpectedly. We just shrugged it off and hoped the system\nperformance would endure the load.",[580,9547,9548],{},"Debugging problems by frantically adding new logs on the new production system was common practice",[580,9550,9551],{},"… and some more",[18,9553,9554],{},[2223,9555],{"alt":9556,"src":9557},"Teamarbeit","https://media.synyx.de/uploads/2018/04/team.jpg",[18,9559,9560],{},[2223,9561],{"alt":48,"src":9562},"https://synyx.de/blog/2018-04-17-breakout-session/team.jpg",[18,9564,9565],{},"Operating this way we ended up with a working proof of concept covering everything we aimed for on this day, which was a\nhuge success! But to be clear: The thing we produced is obviously far from being actually production ready. Some parts\nof the code need refactoring to meet our quality standards. Test coverage has to be provided everywhere on all layers of\nthe test pyramid. Data has to be persisted and resilience measures have to be taken to achieve acceptable robustness of\nthe solution. A proper deployment process has to be established, maybe with containerization. Continous integration and\ncontinous delivery have to be provided. The app has to be refurbished, approved and published. And so on.",[18,9567,9568],{},"Some aspects of the electronic receipt use case were not possible to implement on the breakout day – like the original\nidea to avoid paper waste for the receipt. To stop the checkout counter from printing the receipt we would have to\nchange and deploy the software running on it, which is maintained by another team, rolled out countrywide and is not\nintegrated into a CD lifecycle – impossible to achieve in one day.",[2207,9570,9572],{"id":9571},"the-aftermath","The aftermath",[18,9574,9575,9576,9579],{},"Ultimately the day turned out to be exceptionally valuable both as team building excercise and as an effective method of\nstarting a project. We proved our idea to be feasable in a real production example. We have a working code base, working\nartifacts and infrastructure, that can be built upon and expanded. This will be pursued in the form of normal backlog\nitems during our usual sprint-to-sprint routine. 
The team gained a decent boost of motivation and team spirit – not only the developers but also the product owners, who worked in support of the team and the use case the whole day, and our scrum master, who enabled focus and kept our spirits high with his subtle (*cough*) positive nature.

We perceived the breakout session experiment as a successful, wholesome experience and can recommend it as a method for POCs, project starts and feature kick-offs to every team that wants to try something new.
# An Image Slideshow Shortcode For Hugo

Creating static web sites with [Hugo](http://gohugo.io/) is fun and fast, but providing a convenient shortcode to smoothly cross-fade an unknown number of images in a blog post gets a bit tricky… So let's go!

Shortcodes are Hugo template snippets which can be used inside a markdown document with optional named or unnamed parameters.

The snippet is, for example, called slide.html and has to be placed in a folder called shortcodes in the layout directory of the Hugo site.

To slide some images we need to know the folder's location, so the shortcode in the markdown document will look like this:

`{{< slide "/assets/img/" >}}`

The shortcode itself starts with reading out the parameter and two constants defining the fade-in time and the number of seconds for how long an image is visible.
The localFolder may require some string manipulations to match the local directory.

Read the folder, count the number of files in it and calculate the duration of the whole CSS animation:

```
{{ $fadein := 2 }}
{{ $visible := 4 }}
{{ $param := .Get 0 }}
{{ $localFolder := printf "/static%s/" $param }}

{{ $files := sort (readDir $localFolder) }}
{{ $numberOfFiles := len $files }}
{{ $animationDuration := mul (add $fadein $visible) $numberOfFiles }}
```

Now create the slider div, iterate through the files and generate an img tag for each file. The accompanying style block looks like this:

```
<style>
  .slider {
    padding-bottom: 70%;
    width: 100%;
    height: 0;
    position: relative;
  }
  .slider img {
    width: 100%;
    height: auto;
    position: absolute;
    opacity: 0;
    animation: slide infinite {{$animationDuration}}s;
  }

  {{ $x := div 100.0 $animationDuration }}
  {{ $p0 := 0 }}
  {{ $p1 := mul $x $fadein }}
  {{ $p2 := mul $x (add $fadein $visible) }}
  {{ $p3 := mul $x (add (add $fadein $visible) $fadein) }}

  @keyframes slide {
    {{ $p0 }}% { opacity: 0; }
    {{ $p1 }}% { opacity: 1; }
    {{ $p2 }}% { opacity: 1; }
    {{ $p3 }}% { opacity: 0; }
  }

  {{ range $index, $value := $files }}
  {{ $delay := mul (add $fadein $visible) $index }}
  .slider img:nth-child({{add $index 1}}){animation-delay:{{$delay}}s;}
  {{ end }}
</style>
```
The last part is the CSS animation.

All images get an opacity of 0 and an infinite animation with a keyframe rule binding.

Before we define the keyframe rule we calculate some percentages for it. If you are not familiar with these keyframe calculations, the code may not explain everything, but it should at least give you a hint on how the percentages are calculated.

The individual animation delay for every image is the last step to fade one image into the next one.
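Worked through with the constants above (fade-in f = 2 s, visible v = 4 s) and, purely as an example, n = 5 images (the image count is an assumption, not from the post), the template arithmetic comes out as:

```latex
% example: f = 2s fade-in, v = 4s visible, n = 5 images (n assumed)
\begin{aligned}
\text{animationDuration} &= n(f+v) = 5 \cdot 6 = 30\ \text{s} \\
x &= 100 / 30 \approx 3.33 \\
p_1 &= x \cdot f \approx 6.7\% \quad \text{(image fully faded in)} \\
p_2 &= x \cdot (f+v) = 20\% \quad \text{(image starts fading out)} \\
p_3 &= x \cdot (f+v+f) \approx 26.7\% \quad \text{(image fully faded out)} \\
\text{delay}_i &= (f+v) \cdot i = 6i\ \text{s} \quad \text{for the } i\text{-th image}
\end{aligned}
```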
The following shortcode in action has an additional JavaScript navigation and some prefixed CSS properties to support a wider range of browsers:
Leinwand","https://media.synyx.de/uploads/2019/03/01_synema-768x512.jpg",[18,10008,10009],{},[2223,10010],{"alt":10011,"src":10012},"Synema Buffet","https://media.synyx.de/uploads/2019/03/IMG_5784-768x512.jpg",[18,10014,10015],{},[2223,10016],{"alt":10017,"src":10018},"Snapschot vom Synema event","https://media.synyx.de/uploads/2019/03/IMG_5834-768x512.jpg",[18,10020,10021],{},[2223,10022],{"alt":10023,"src":10024},"Synema Kino eingang","https://media.synyx.de/uploads/2019/03/IMG_5835-768x512.jpg",[18,10026,10027],{},[2223,10028],{"alt":10029,"src":10030},"Synema Star Wars Impression","https://media.synyx.de/uploads/2019/03/IMG_5862-768x512.jpg",[18,10032,10033],{},[2223,10034],{"alt":10035,"src":10036},"Publikum beim Synema event","https://media.synyx.de/uploads/2019/03/IMG_5847-768x512.jpg",[18,10038,10039],{},[2223,10040],{"alt":10041,"src":10042},"Thomas Kraft beim Synema event","https://media.synyx.de/uploads/2019/03/IMG_5954-768x512.jpg",[18,10044,10045],{},[2223,10046],{"alt":48,"src":10047},"https://media.synyx.de/uploads/2019/03/IMG_6016-768x512.jpg",[607,10049,989],{},{"title":48,"searchDepth":86,"depth":86,"links":10051},[],[613,614],"2018-04-12T11:45:46","Creating static web sites with Hugo is fun and fast but providing a convenient shortcode to\\nsmoothly cross-fade an unknown number of images in a blogpost gets a bit tricky… So let’s go!","https://synyx.de/blog/an-image-slideshow-shortcode-for-hugo/",{},"/blog/an-image-slideshow-shortcode-for-hugo",{"title":9606,"description":10059},"Creating static web sites with Hugo is fun and fast but providing a convenient shortcode to\nsmoothly cross-fade an unknown number of images in a blogpost gets a bit tricky… So let’s go!","blog/an-image-slideshow-shortcode-for-hugo",[10062,10063],"css","hugo","Creating static web sites with Hugo is fun and fast but providing a convenient shortcode to smoothly cross-fade an unknown number of images in a blogpost gets a bit tricky… So let’s go!","-zIyT5c6EfwcvonPd1c3lRBKIjuDg5Z-B0BHOQAqfzg",{"id":10067,"title":10068,"author":10069,"body":10071,"category":10880,"date":10881,"description":10882,"extension":617,"link":10883,"meta":10884,"navigation":499,"path":10885,"seo":10886,"slug":10075,"stem":10887,"tags":10888,"teaser":10894,"__hash__":10895},"blog/blog/using-travis-ci-to-deploy-to-maven-repositories-and-github-releases.md","Using Travis CI to deploy to Maven repositories and GitHub Releases",[10070],"larrasz",{"type":11,"value":10072,"toc":10869},[10073,10076,10079,10083,10096,10121,10127,10157,10182,10361,10365,10372,10426,10430,10441,10445,10473,10565,10569,10576,10623,10642,10655,10659,10665,10676,10689,10774,10778,10781,10787,10821,10828,10831,10834,10867],[14,10074,10068],{"id":10075},"using-travis-ci-to-deploy-to-maven-repositories-and-github-releases",[18,10077,10078],{},"This post outlines the steps needed to simultaneously deploy to Maven repositories and to GitHub Releases. Every time a\ntagged commit is pushed, a Travis CI build will be triggered automatically and start the release process. This blog post\nuses Sonatype Nexus as an example for a Maven repository manager.",[2207,10080,10082],{"id":10081},"preparing-github-releases","Preparing GitHub Releases",[18,10084,10085,10086,10091,10092,10095],{},"Sergey Mashkov has written a ",[585,10087,10090],{"href":10088,"rel":10089},"https://github.com/cy6erGn0m/github-release-plugin",[589],"Maven plugin"," that allows us to create\na new release on our project’s releases page and upload our build artifacts to a release. 
The following sections describe how we need to configure our `pom.xml` in order to use this plugin.

The plugin uses the `scm` settings to find out for which project the new release should be created. Right now there is still a bug in the plugin which restricts the format of our git URIs. The only working format is `scm:git:git@github.com:...`. Neither `scm:git:https://github.com/...` nor `scm:git:ssh://git@github.com/...` works, but a [pull request](https://github.com/cy6erGn0m/github-release-plugin/pull/2) has been created that adds this functionality.

So add an `scm` section to your pom that looks like this:

```xml
<scm>
    <url>https://github.com/example/project</url>
    <connection>scm:git:git@github.com:example/project.git</connection>
    <developerConnection>scm:git:git@github.com:example/project.git</developerConnection>
</scm>
```

The second step is to include the plugin in our pom. Right now the plugin is only available from [bintray.com](http://dl.bintray.com/cy6ergn0m/maven), so we need to add it as a plugin repository. We only want to create a new release on GitHub when we are building a new release, hence we configure the plugin in an extra release profile section. This leads to the plugin being executed only if Maven is started with `-Prelease` and only if the deploy goal is invoked.
For more information on how to configure the plugin options please refer to its [documentation](https://github.com/cy6erGn0m/github-release-plugin#plugin-configuration-options) and the [pitfalls](https://synyx.de/blog/2018-01-24-travisci-github-releases/?page=3#pitfalls) below.

```xml
<profiles>
    ...
    <profile>
        <id>release</id>
        <pluginRepositories>
            <pluginRepository>
                <id>bintray-cy6ergn0m-maven</id>
                <name>bintray-plugins</name>
                <url>http://dl.bintray.com/cy6ergn0m/maven</url>
            </pluginRepository>
        </pluginRepositories>
        <build>
            <plugins>
                <plugin>
                    <groupId>cy.github</groupId>
                    <artifactId>github-release-plugin</artifactId>
                    <version>0.5.1</version>
                    <configuration>
                        <tagName>${project.version}</tagName>
                        <releaseTitle>${project.artifactId}-${project.version}</releaseTitle>
                        <serverId>github</serverId>
                    </configuration>
                    <executions>
                        <execution>
                            <goals>
                                <goal>gh-upload</goal>
                            </goals>
                            <phase>deploy</phase>
                        </execution>
                    </executions>
                </plugin>
            </plugins>
        </build>
    </profile>
</profiles>
```
## Preparing for Maven releases

If you don't already have a repository you want to deploy to, you need to create a release and a snapshot repository and add them to `distributionManagement`. You might also want to create a separate user that has access only to your target repositories.
This user will be used to upload the releases.

```xml
<distributionManagement>
    <repository>
        <id>oss</id>
        <url>https://nexus.example.com/content/repositories/oss-releases</url>
    </repository>
    <snapshotRepository>
        <id>oss</id>
        <url>https://nexus.example.com/content/repositories/oss-snapshots</url>
    </snapshotRepository>
</distributionManagement>
```

## Putting it all together

So far we have configured the GitHub release plugin to deploy our artifacts to the GitHub Releases page and set up Maven releases. Now it's time to glue the parts together. In order to do this we have to create a `settings.xml` for use with Maven and a `.travis.yml` that manages our Travis CI builds, and we have to configure some environment variables in Travis CI itself. Furthermore we need a small shell script that orchestrates our release.

### Maven settings

Create a new `settings.xml` file in your repository, e.g. in a `.travis/` directory. The content of this file should look like the following snippet. The `<server>` ids have to match the ids in `<distributionManagement>` and the `<serverId>` of the GitHub release plugin exactly. Do not use static credentials here! You don't want everyone who stumbles upon your repository on GitHub to have write access to your Nexus/Artifactory and GitHub.
We will use Travis CI's capability to inject environment variables into builds; the [environment variables](https://synyx.de/blog/2018-01-24-travisci-github-releases/?page=3#configuring-travis-ci-itself) will be configured soon.

```xml
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
                              http://maven.apache.org/xsd/settings-1.0.0.xsd">
    <servers>
        <server>
            <id>oss</id>
            <username>${env.NEXUS_USERNAME}</username>
            <password>${env.NEXUS_PASSWORD}</password>
        </server>
        <server>
            <id>github</id>
            <username>${env.GITHUB_USERNAME}</username>
            <password>${env.GITHUB_TOKEN}</password>
        </server>
    </servers>

</settings>
```

### Release script

We need a small shell script that orchestrates our releases. This script sets the correct version, creates a release and uploads it to our Maven repository and to GitHub. To release the correct version, the `TRAVIS_TAG` environment variable will be used.
Travis CI uses this variable to inject the value of the git tag into the build.

```bash
#!/usr/bin/env bash

set -e

echo "Ensuring that pom matches $TRAVIS_TAG"
./mvnw org.codehaus.mojo:versions-maven-plugin:2.5:set -DnewVersion=$TRAVIS_TAG

echo "Uploading to oss repo and GitHub"
./mvnw deploy --settings .travis/settings.xml -DskipTests=true --batch-mode --update-snapshots -Prelease
```

The script first sets the `<version>` in the pom exactly to our git tag's value, so your tag always matches the version you want to release, e.g. `1.0` or `1.5.1`. The second part creates the release. We need to reference the Maven settings in our repository here so that Travis CI has access rights to the Maven repositories and GitHub Releases. The important part is to activate the `release` profile. This tells Maven to not only create and upload a Maven release but also to create a new GitHub Release.

Name this script `release.sh`, put it inside the `.travis/` directory and make it executable (`chmod +x`).

### Build configuration

Travis CI uses a file named `.travis.yml` at the root of a GitHub repository. The following snippet contains the necessary steps.

In a normal build we just want to execute a simple `clean verify`. The `verify` goal will execute unit and integration tests. To make subsequent builds faster, we want to cache the m2 repositories during builds.

The most important part is the deploy section.
Here we configure Travis CI to run the `release.sh` script if and only if a tag has been pushed (`tags: true`) on the repo `example/project`.

```yaml
sudo: false
language: java
jdk:
  - oraclejdk8
script: ./mvnw clean verify
cache:
  directories:
    - $HOME/.m2
deploy:
  provider: script
  script: .travis/release.sh
  skip_cleanup: true
  on:
    repo: example/project
    tags: true
    jdk: oraclejdk8
```

### Configuring Travis CI itself

Now we need to teach Travis CI the values of the environment variables used in our Maven settings. To do this navigate to the Travis CI settings for your project:

![travis environment](https://media.synyx.de/uploads/2018/01/travis-environment-768x282.png)

Add `NEXUS_USERNAME` and `NEXUS_PASSWORD` for your newly [created user](https://synyx.de/blog/2018-01-24-travisci-github-releases/?page=3#preparing-maven-releases). You also need to configure `GITHUB_USERNAME` and `GITHUB_TOKEN`. Even though the field is called password, what you really want to configure here is your **GitHub API token**; otherwise the GitHub release plugin will not be able to upload artifacts. You can obtain your personal access token [here](https://github.com/settings/tokens). The token needs access to the repo scope.

And that's it. Now you can simply create a new tag in your repository (`git tag -a 1.1 -m "Release 1.1"`), push it to GitHub and Travis CI will trigger the release process.
Happy releasing!

## Pitfalls

Some advice so that you do not encounter the same problems we did:

* Do not forget to make `release.sh` executable, otherwise the Travis CI build will fail with a rather unhelpful message: `Script failed with status 127`.
* The `serverId` in the GitHub release plugin's configuration refers to a `<server>` entry in Maven's `settings.xml`.
* Do not be tempted to use your GitHub account password to configure the server settings for the GitHub release plugin. Even though the field is called password, it's really an API token that is needed here.
* Do not change the value of `<tagName>` when configuring the GitHub release plugin. Using anything different from the project's version (i.e. the git tag's value) will lead to recursive release builds. This happens because the release plugin will create and push a new tag if it does not already exist. Pushing a tag invokes another Travis CI build, which will create a new tag, and so on.
# Implementing a waiting component with user experience in mind

Single page applications have improved how quickly users get feedback compared to the classic request-response cycle. However, there is one serious downside to this approach: elements pop into view all over the page at different times. Data loading indicated by a waiting animation is particularly affected by this phenomenon. In this blog post I'd like to present our solution: a UI component that takes care of delaying the rendering of the animation.

**Disclaimer:** we're using React in our frontend (without server side rendering). In case you don't know React: React provides lifecycle hooks for UI components like

* `render`
* `willUpdate`
* or `didUpdate`

These hooks can be used to do the internal stuff your component requires to be rendered correctly. React components can be updated either by changing `properties` or by updating `state`. Properties are actually the public API of the component. The `state`, however, is the antagonist which can only be updated by the component itself. Changing `properties` or `state` triggers specific lifecycle hooks and finally a rerendering of the component. Don't hesitate to read the [react docs](https://reactjs.org/docs/rendering-elements.html) for more detail.

tl;dr: the source code is available [on github](https://github.com/bseber/waiting).

## loading or not loading

At first we have to satisfy the basic need: the user must get feedback on whether we're currently loading data or not. The most simple component takes a boolean property that reflects the current state.
```javascript
class Waiting extends React.Component {
  render() {
    return this.props.loading ? <div>loading...</div> : null;
  }
}
```

This component can now be used in our app. The loading info is visible as long as the `loading` flag is set to `true` and hidden as soon as the flag is toggled. `MyDataView` is just another component that takes care of rendering the data.

```javascript
class MyApp extends React.Component {
  // initialState
  // no data existent and we're loading currently
  state = {
    data: null,
    loading: true
  };

  renderData() {
    return this.state.data ? <MyDataView data={this.state.data} /> : null;
  }

  render() {
    return (
      <div>
        <Waiting loading={this.state.loading} />
        {this.renderData()}
      </div>
    );
  }
}
```
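How the `loading` flag gets toggled is not shown here; for context, a minimal sketch of one way `MyApp` might do it, assuming a hypothetical `/api/data` endpoint (both the endpoint and the fetch logic are illustrative, not from the original example):

```javascript
class MyApp extends React.Component {
  state = { data: null, loading: true };

  componentDidMount() {
    // start loading once the component is mounted;
    // the Waiting component reacts to the loading flag in render()
    fetch("/api/data")
      .then((response) => response.json())
      .then((data) => this.setState({ data, loading: false }))
      .catch(() => this.setState({ loading: false }));
  }

  // renderData() and render() as shown above
}
```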
One benefit of this solution is that we now have a reusable component. We don't have to care about the visualisation at every place anymore. It could render the div element with a static text or it could render some more advanced CSS animation. For instance, we could change the loading animation to use this awesome [codepen](https://codepen.io/dissimulate/pen/vlfyA) by refactoring the `Waiting` component implementation only. Consumers of the `Waiting` component wouldn't have to be touched.
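To illustrate that point, a sketch of such a refactoring; the `spinner` class name is made up for the example:

```javascript
class Waiting extends React.Component {
  render() {
    // swap the static text for any animated markup;
    // consumers still only pass the loading flag
    return this.props.loading ? <div className="spinner" /> : null;
  }
}
```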
A second benefit is the really simple implementation of the `Waiting` component. Even without knowing React or JavaScript in detail you quickly see that either a div or nothing is rendered.

## pretend not loading when it's fast

The next step is a user experience improvement. We don't want to render the loading text when the `loading` flag is toggled back to false within 100ms.

> 0.1 second is about the limit for having the user feel that the system is reacting instantaneously, meaning that no special feedback is necessary except to display the result.

Jakob Nielsen

To keep changes small, let us first map the loading property value to the internal component state. React takes care of calling `render` when either new properties are given or state is changed with `setState`. So in the constructor we're mapping the original loading flag to render the initially intended state, let's say the *yep, we're currently loading* state. Soon afterwards the property will eventually swap to *nop, we're finished loading*. This can be intercepted by the `componentWillReceiveProps` lifecycle hook. Just like in the constructor, we're mapping the property to the internal state.

```diff
 class Waiting extends React.Component {
+  constructor(props) {
+    super();
+    this.state = { loading: props.loading };
+  }
+
+  componentWillReceiveProps(nextProps) {
+    if (nextProps.loading !== this.props.loading) {
+      this.setState({ loading: nextProps.loading });
+    }
+  }
+
   render() {
-    return this.props.loading ? <div>loading...</div> : null;
+    return this.state.loading ? <div>loading...</div> : null;
   }
 }
```
",[53,11467,11016],{"class":389},[53,11469,11019],{"class":82},[53,11471,7091],{"class":7115},[53,11473,11024],{"class":82},[53,11475,7091],{"class":7115},[53,11477,11029],{"class":82},[53,11479,4101],{"class":389},[53,11481,11034],{"class":89},[53,11483,1727],{"class":82},[53,11485,11486],{"class":55,"line":533},[53,11487,7384],{"class":82},[53,11489,11490],{"class":55,"line":539},[53,11491,282],{"class":82},[18,11493,11494],{},"So far we’ve gained nothing but complexity /o",[18,11496,11497,11498,11500,11501,11503,11504,11506,11507,11510,11511,11513,11514,11516,11517,986],{},"Now to the interesting part. As soon as the",[50,11499,11234],{}," component receives new properties we’re starting a timeout to\nupdate the internal state with a delay of 100ms. Remember react calls",[50,11502,10920],{},"on property changes as well as on state\nchanges. So",[50,11505,10920],{}," is called two times now actually. The first time it renders the same as previously ",[573,11508,11509],{},"nop, we’re not\nloading",". After 100ms",[50,11512,11275],{}," is called which triggers the second ",[50,11515,10920],{},"cycle ",[573,11518,11519],{},"yep, we’re loading",[43,11521,11523],{"className":288,"code":11522,"language":290,"meta":48,"style":48},"class Waiting extends React.Component {\n constructor() { ... }\n\n componentWillReceiveProps(nextProps) {\n if (nextProps.loading !== this.props.loading) {\n+ window.clearTimeout(this._loadingTimeout);\n+ this._loadingTimeout = window.setTimeout(() => {\n this.setState({ loading: nextProps.loading });\n+ }, 100);\n }\n\n render() { ... }\n }\n}\n",[50,11524,11525,11530,11535,11539,11544,11549,11554,11559,11564,11569,11574,11578,11583,11588],{"__ignoreMap":48},[53,11526,11527],{"class":55,"line":56},[53,11528,11529],{},"class Waiting extends React.Component {\n",[53,11531,11532],{"class":55,"line":86},[53,11533,11534],{}," constructor() { ... }\n",[53,11536,11537],{"class":55,"line":126},[53,11538,500],{"emptyLinePlaceholder":499},[53,11540,11541],{"class":55,"line":163},[53,11542,11543],{}," componentWillReceiveProps(nextProps) {\n",[53,11545,11546],{"class":55,"line":186},[53,11547,11548],{}," if (nextProps.loading !== this.props.loading) {\n",[53,11550,11551],{"class":55,"line":221},[53,11552,11553],{},"+ window.clearTimeout(this._loadingTimeout);\n",[53,11555,11556],{"class":55,"line":242},[53,11557,11558],{},"+ this._loadingTimeout = window.setTimeout(() => {\n",[53,11560,11561],{"class":55,"line":273},[53,11562,11563],{}," this.setState({ loading: nextProps.loading });\n",[53,11565,11566],{"class":55,"line":279},[53,11567,11568],{},"+ }, 100);\n",[53,11570,11571],{"class":55,"line":496},[53,11572,11573],{}," }\n",[53,11575,11576],{"class":55,"line":503},[53,11577,500],{"emptyLinePlaceholder":499},[53,11579,11580],{"class":55,"line":509},[53,11581,11582],{}," render() { ... }\n",[53,11584,11585],{"class":55,"line":515},[53,11586,11587],{}," }\n",[53,11589,11590],{"class":55,"line":521},[53,11591,282],{},[18,11593,11594,11595,11598,11599,11602,11603,11606],{},"But wait, what’s happening now when the loading property is swapped the other way around from ",[573,11596,11597],{},"yep"," to ",[573,11600,11601],{},"nop","? 
Remember the implementation of `MyApp` from above?

```javascript
class MyApp extends React.Component {
  // ...
  render() {
    return (
      <div>
        <Waiting loading={this.state.loading} />
        {this.renderData()}
      </div>
    );
  }
}
```

The `Waiting` component receives the updated loading flag *false* and delays its internal rendering, while `this.renderData()` renders the actual data. So the loading info is briefly visible alongside the data. Fortunately this can be fixed easily. We just have to update immediately when the *loading* property is set to *false*.

```
class Waiting extends React.Component {
  constructor() { ... }

  componentWillReceiveProps(nextProps) {
    if (nextProps.loading !== this.props.loading) {
      window.clearTimeout(this._loadingTimeout);
+     if (nextProps.loading) {
        this._loadingTimeout = window.setTimeout(() => {
          this.setState({ loading: nextProps.loading });
        }, 100);
+     } else {
+       this.setState({ loading: false });
+     }
    }
  }

  render() { ... }
}
```
}\n}\n",[50,11685,11686,11690,11694,11698,11702,11706,11711,11716,11721,11726,11731,11736,11741,11746,11750,11754,11758,11763],{"__ignoreMap":48},[53,11687,11688],{"class":55,"line":56},[53,11689,11529],{},[53,11691,11692],{"class":55,"line":86},[53,11693,11534],{},[53,11695,11696],{"class":55,"line":126},[53,11697,500],{"emptyLinePlaceholder":499},[53,11699,11700],{"class":55,"line":163},[53,11701,11543],{},[53,11703,11704],{"class":55,"line":186},[53,11705,11548],{},[53,11707,11708],{"class":55,"line":221},[53,11709,11710],{}," window.clearTimeout(this._loadingTimeout);\n",[53,11712,11713],{"class":55,"line":242},[53,11714,11715],{},"+ if (nextProps.loading) {\n",[53,11717,11718],{"class":55,"line":273},[53,11719,11720],{}," this._loadingTimeout = window.setTimeout(() => {\n",[53,11722,11723],{"class":55,"line":279},[53,11724,11725],{}," this.setState({ loading: nextProps.loading });\n",[53,11727,11728],{"class":55,"line":496},[53,11729,11730],{}," }, 100);\n",[53,11732,11733],{"class":55,"line":503},[53,11734,11735],{},"+ } else {\n",[53,11737,11738],{"class":55,"line":509},[53,11739,11740],{},"+ this.setState({ loading: false });\n",[53,11742,11743],{"class":55,"line":515},[53,11744,11745],{},"+ }\n",[53,11747,11748],{"class":55,"line":521},[53,11749,860],{},[53,11751,11752],{"class":55,"line":527},[53,11753,7384],{},[53,11755,11756],{"class":55,"line":533},[53,11757,500],{"emptyLinePlaceholder":499},[53,11759,11760],{"class":55,"line":539},[53,11761,11762],{}," render() { ... }\n",[53,11764,11765],{"class":55,"line":545},[53,11766,282],{},[18,11768,11769,11770,11773,11774,11776,11777,11779,11780,11782,11783,11788,11789,11792,11793,8780],{},"Now we’ve gained a good user experience by not displaying the loading info if the loading property is toggled from ",[573,11771,11772],{},"yay","\nback to ",[573,11775,11601],{}," within 100ms. There is no flickering anymore o/ However, we’ve payed with some complexity in the\n",[50,11778,11234],{}," component and even have async stuff happening there. So testing consumers of the ",[50,11781,11234],{}," component could be\nconfusing. But in my opinion the better user experience is worth the complexity and tests should be fine as long\nas ",[585,11784,11787],{"href":11785,"rel":11786},"https://reactjs.org/docs/shallow-renderer.html",[589],"shallowRendering"," is used. Otherwise we have to use the timemachine\nfeature of the testing library (e.g. jest provides ",[50,11790,11791],{},"jest.useFakeTimers()"," and ",[50,11794,11795],{},"jest.runTimersToTime(100)",[2207,11797,11799],{"id":11798},"improved-handling-of-data-rendering","improved handling of data rendering",[18,11801,11802],{},"Currently we have a waiting component that takes care about delaying the loading info. 
## improved handling of data rendering

Currently we have a waiting component that takes care of delaying the loading info. But the consumer is still responsible for checking itself whether the data is available and should be rendered or not.

```javascript
renderData() {
  return this.state.data
    ? <DataList data={this.state.data} />
    : null;
}
```

However, my colleagues and my humble self could actually live with this redundancy. It is explicit, and the waiting component wouldn’t be bloated with more features and complexity. But in our project we had the following issue (amongst some others…):

Given `DataList` renders a list of items with a headline and other eye-candy stuff. It takes care of rendering a *no data* info banner when the given list is empty. The default `this.state.data` value is an empty array instead of undefined or null, to avoid the notorious `Cannot read property XXX of undefined`. Then the code snippet above results in always rendering `DataList` and therefore the *no data* info banner (an empty array is a truthy expression).

The unwanted *no data* info banner could be avoided by adding the `this.state.loading` flag to the condition. But that’s not really satisfying, since it adds more complexity which will even be copied and pasted into other components.

```javascript
renderData() {
  return (this.state.data && !this.state.loading)
    ? <DataList data={this.state.data} />
    : null;
}
```

Furthermore… remember the actual challenge we tried to solve with the `Waiting` component which delays the rendering of the loading info?
Exactly, we wanted to avoid flickering and displaying the loading info when the data is received within 100ms. Now we’ve added this problem again for `DataList`. The component will be unmounted and mounted within 42ms, for instance. The new data is visible, but all the eye candy around the data list (like the headline) is gone and rerendered within one blink of an eye.

So let’s improve the `Waiting` component to handle the rendering of its children. We have two React techniques to implement this:

- render props
- function as child

Both are the same actually. The *render prop* pattern uses a function passed as a component property to render something. The *function as child* pattern is… well… the same. `children` is just an additional property of a React component. The difference between *render props* and *function as child* is the syntax. Personally I prefer *render props* since it is more explicit and doesn’t leave room for misconception for people not knowing React and JSX in detail.

```javascript
class RenderProps extends React.Component {
  render() {
    return <Waiting render={() => this.renderData()} />;
  }
}

class FunctionAsChild extends React.Component {
  render() {
    return <Waiting>{() => this.renderData()}</Waiting>;
  }
}
```

The first step is to extend the `Waiting` component with a render property.
Instead of returning `null` when data is not loading, we have to call `this.props.render`.

```
class Waiting extends React.Component {
  constructor() { ... }

  componentWillReceiveProps(nextProps) { ... }

+  renderContent() {
+    return this.state.loading ? <div>loading...</div> : this.props.render();
+  }
+
  render() {
-    return this.state.loading ? <div>loading...</div> : null;
+    return this.renderContent();
  }
}
```
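With that in place, a consumer such as `MyApp` no longer needs its own data/loading condition. The following is a small sketch of how that wiring could look (the `loading` prop and the state handling in `MyApp` are assumptions, not code from the original post):

```javascript
class MyApp extends React.Component {
  // ...
  render() {
    return (
      <div>
        {/* Waiting decides whether to show the loading info or the actual data */}
        <Waiting loading={this.state.loading} render={() => this.renderData()} />
      </div>
    );
  }
}
```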
{},"html pre.shiki code .szBVR, html code.shiki .szBVR{--shiki-default:#D73A49;--shiki-dark:#F97583}html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sVt8B, html code.shiki .sVt8B{--shiki-default:#24292E;--shiki-dark:#E1E4E8}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}html pre.shiki code .s9eBZ, html code.shiki .s9eBZ{--shiki-default:#22863A;--shiki-dark:#85E89D}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sJ8bj, html code.shiki .sJ8bj{--shiki-default:#6A737D;--shiki-dark:#6A737D}html pre.shiki code .s4XuR, html code.shiki .s4XuR{--shiki-default:#E36209;--shiki-dark:#FFAB70}",{"title":48,"searchDepth":86,"depth":86,"links":12263},[12264,12265,12266],{"id":10967,"depth":86,"text":10968},{"id":11247,"depth":86,"text":11248},{"id":11798,"depth":86,"text":11799},[613,614],"2017-12-14T12:28:51","Giving fast feedback to users has been improved by single page applications over the request response cycle. However,\\nthere is one serious downside with this approach. Elements are popping out of the wild on various sections everytime.\\nParticular data loading indicated by a waiting animation is affected with this phenomenon. In this blog I’d like to\\npresent you our solution of a UI component that takes care about delaying the rendering of the animation.","https://synyx.de/blog/implementing-a-waiting-component-with-user-experience-in-mind/",{},"/blog/implementing-a-waiting-component-with-user-experience-in-mind",{"title":10898,"description":10907},"blog/implementing-a-waiting-component-with-user-experience-in-mind",[],"Giving fast feedback to users has been improved by single page applications over the request response cycle. However, there is one serious downside with this approach. Elements are popping out of the wild on various sections everytime. Particular data loading indicated by a waiting animation is affected with this phenomenon. 
# How we ended up using BDD

*weigel, 2017-07-31*

It was not our primary goal to use Behaviour Driven Development (BDD) in the project at a customer, but while finding and optimizing our agile software development process we recognized that we had established the building blocks of BDD. It works quite well and offers a lot of space and flexibility for our future plans of switching our architecture to microservices.

### The project setup

But let us start at the beginning of the project. We started on a green-field project but had to embed into a system context with established interfaces and many third-party services, as we had to replace a legacy system. Also, as a newly formed team consisting of internal and external employees, we had to find our team spirit and embed into the existing organizational structure. With at least one dedicated Product Owner (PO) and a dedicated Scrum Master we started as a team of 7 people. We are one of multiple Scrum teams in an organization sector. The operations team is (still) in another sector and the domain experts or Functional Owners (FO) are even in another building. On the one hand this makes communication on domain topics harder; on the other hand, with this setting we had a lot of liberties to build, release and deploy whenever we want. Noticing that other teams organized their cyclic releases as part of their Scrum process every two weeks, our way of deploying once a feature was complete seemed unconventional to others.

### Trust as prerequisite to keep liberties in development process

As time went by we had our first deployment to production. We have been consuming other services and vice versa. While more features were added, the complexity and the amount of deployments to production increased. Our POs’ confidence in our unconventional release and deploy process decreased, perhaps feeling that the project got out of hand. Of course we sent announcement e-mails to relevant stakeholders, but somehow we had not built up enough trust yet and our POs still had the desire to run manual tests on the release stage. So we started to create charts of all processes together with our POs as part of our development process, just before talking about the user story in our refinement. It was and still is helpful for two reasons. On the one hand it created a format the whole team understands, simplifying the process and serving as documentation. On the other hand it served as a kind of contract which we as devs and the POs were committed to. We also arranged that our POs would write acceptance tests for the use case.
With those newly introduced methods in our development process, confidence and trust increased, and we kept our way of releasing and deploying during the sprint once a feature was “Definition of Done” ready.

### Tests turn from verification to specification

Again time went by, and as the FOs sit in another building and, in contrast to our POs, feel the need for manual tests, the question of a fixed deployment plan arose again. Newly implemented features and more dependencies on other teams strengthened the need. To hold on to our flexibility and to keep deploying during our sprint we introduced a process called “3 Amigo Testing”. Before a story reaches the “Definition of Ready” and is thereby ready for our refinement, the tests have to be written together with our FOs, our POs and us (one of the devs). Thereby tests turn from verification to specification, no matter at which level: unit, integration or system integration! In addition, the discussion not only improves the communication between the business and the technical team, it also helps to understand the complexity and the pitfalls early, which results in a better estimation.

### Time to grasp the nettle

Technically that means you need an abstraction on top of your automated tests. Behaviour tests should always be written from a customer’s point of view and describe the acceptance criteria of a use case. It is important that you use a business-readable language like [gherkin](https://github.com/cucumber/cucumber/wiki/Gherkin). It describes a test in the form of an initial state (“given”), an action (“when”) and a final state (“then”), which is common in the BDD context. By that you have a description of a finite state machine, which gives you the ability to describe a feature integrally. It does not matter at which level of the testing pyramid the actual implementation of the test is settled. A defined requirement may be implemented at unit level as well as at system integration level. We use [cucumber](https://cucumber.io/) because it meets the aforementioned requirements and because of existing knowledge and easy integration in our ecosystem. It is easy to use with the Spring framework and gives us pretty Jenkins reportings loved by our POs. But it is only [one](http://lettuce.it/) [of](http://spockframework.org/) [many](http://robotframework.org/) [possible](http://jbehave.org/) [tools](http://www.thucydides.info/#/).
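As a rough illustration of the given/when/then form (not an actual scenario from our project; the shipment domain and all names are invented, and the annotation package depends on the Cucumber version), a gherkin scenario and its Java glue code could look like this:

```java
// Feature file (e.g. src/test/resources/features/shipment.feature):
//
//   Scenario: Announce a shipment
//     Given a customer with an existing account
//     When the customer announces a shipment
//     Then the shipment is listed as "announced"

import io.cucumber.java.en.Given;
import io.cucumber.java.en.Then;
import io.cucumber.java.en.When;

public class ShipmentSteps {

    private String shipmentStatus;

    @Given("a customer with an existing account")
    public void aCustomerWithAnExistingAccount() {
        // in the real project this would set up test data, e.g. via Spring beans
    }

    @When("the customer announces a shipment")
    public void theCustomerAnnouncesAShipment() {
        shipmentStatus = "announced";
    }

    @Then("the shipment is listed as {string}")
    public void theShipmentIsListedAs(String expectedStatus) {
        if (!expectedStatus.equals(shipmentStatus)) {
            throw new AssertionError("expected " + expectedStatus + " but was " + shipmentStatus);
        }
    }
}
```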
Finally we recognized that we had established the building blocks of BDD:

**Define tests beforehand** – The specification of tests takes place before talking about the story in the refinement.

**Use ubiquitous language** – Defining the behaviour together with our POs and FOs, we use a common language to avoid misunderstandings.

**Express all requirements in high-level business terms from the customer point of view** – The tests serve as the acceptance criteria of the story describing the next business value to deliver. Therefore they are defined from the customer’s perspective.

**Accessible to all stakeholders** – The tests, which now serve as documentation, are accessible to all stakeholders via a reporting plugin provided by our continuous integration tool.

### Conclusion

With that said, we built up not only the technical prerequisite to eliminate all manual tests but also gained trust and are now ready for continuous deployment. In addition, our test suite turns out to be a helpful backbone for our ongoing project of splitting a monolith into microservices.

If you want to know more about the project, join us for our [talk](https://synyx.de/events/17_08_jugka_microservices_andi/) at the Java User Group Karlsruhe!

Sources:

- https://www.agilealliance.org/glossary/bdd
- https://dannorth.net/introducing-bdd/
- https://martinfowler.com/bliki/GivenWhenThen.html
- http://www.agiletestingframework.com/atf/testing/behavior-driven-development-bdd/
- https://en.wikipedia.org/wiki/Behavior-driven_development
It\\nworks quite well and offers a lot of space and flexibility for our future plans, switching our architecture to\\nmicroservices.","https://synyx.de/blog/how-we-ended-up-using-bdd/",{},"/blog/how-we-ended-up-using-bdd",{"title":12280,"description":12290},"blog/how-we-ended-up-using-bdd",[12448,12449,7611,12450,12451],"bdd","behaviour","driven","trust","It was not our primary goal to use Behaviour Driven Development (BDD) in the project at a customer, but while finding and optimizing our agile software development process we recognized that we established the building blocks of BDD. It works quite well and offers a lot of space and flexibility for our future plans, switching our architecture to microservices. The project setup But let us start at the beginning of the project.","yImgzxEzR7UHl6fIsRCGniHr2xISKHBeIlBhI5IqQOs",{"id":12455,"title":12456,"author":12457,"body":12458,"category":12874,"date":12875,"description":12876,"extension":617,"link":12877,"meta":12878,"navigation":499,"path":12879,"seo":12880,"slug":12462,"stem":12881,"tags":12882,"teaser":12887,"__hash__":12888},"blog/blog/the-struggle-with-hazelcast-queue-persistence.md","The struggle with Hazelcast queue persistence",[7799],{"type":11,"value":12459,"toc":12861},[12460,12463,12466,12470,12473,12482,12485,12502,12517,12523,12527,12535,12538,12541,12545,12548,12551,12628,12632,12635,12638,12641,12644,12653,12657,12660,12671,12676,12679,12682,12686,12689,12775,12779,12782,12785,12788,12791,12795,12798,12801,12816,12819,12823,12826,12835,12839,12842,12845,12859],[14,12461,12456],{"id":12462},"the-struggle-with-hazelcast-queue-persistence",[18,12464,12465],{},"In this blog I will outline why we used Hazelcast for queueing messages in-memory distributed over a cluster and how we\nachieved higher resilience by persisting the queue’s content. I will explain the pitfalls and difficulties that we\nencountered and how I constantly switched between praising and condemning Hazelcast.",[2207,12467,12469],{"id":12468},"the-problem-to-solve","The problem to solve",[18,12471,12472],{},"I’m currently working in a project for a large customer data backend. The prod system consists of a load balanced\ncluster of five VMs each running two Tomcat instances hosting our application. The deployment process performs an A/B\nswitching between the Tomcats on each node to achieve zero downtime. The application has to handle a lot of incoming\ndata and updates and communicates with a lot of external services. At one point we felt the need for a queueing\nmechanism for two reasons:",[12474,12475,12476,12479],"ol",{},[580,12477,12478],{},"Enabling controlled asynchronous processing of tasks inside the application. Example: A synchronous user request\nqueues follow-up tasks to be processed later by another part of the application so the request can deliver the\nresponse quicker to the user.",[580,12480,12481],{},"Queueing and retrying failed calls to external systems for higher resilience",[18,12483,12484],{},"We gathered the following core requirements for the queueing mechanism:",[577,12486,12487,12490,12493,12496,12499],{},[580,12488,12489],{},"Embedded into the application. Using a potentially failing external system would defeat reason 2",[580,12491,12492],{},"Distributed over the cluster. Due to the nature of our data import mechanism one node creates a lot of tasks and the\ncluster should work together to process the tasks.",[580,12494,12495],{},"Resistant to system failure. 
- Performance. Due to the amount of processed data the solution has to be fast.
- Low complexity and easy maintainability. “Keep it simple” is a key ambition for everything that we use or build.

After a short evaluation phase these requirements led us to the conclusion that [Hazelcast](https://hazelcast.org/) might be the solution to our problem. It can be embedded as a library, its core feature is distributed data structures like maps and queues, and it is known to be lightning fast and easy to use. It also offers backup and recovery mechanisms as well as the possibility to implement persistence for the data structures. And it’s [open source](https://github.com/hazelcast/hazelcast), yay!

![Logo hazelcast](https://media.synyx.de/uploads/2019/04/hazelcast_logo_small-768x185.png)

## The easy part – divide and queue

The first implementation of the Hazelcast queue in our application was a piece of cake. Following the [documentation](http://docs.hazelcast.org/docs/3.8.2/manual/html-single/index.html) we only needed two dependencies in our pom.xml, some properties in our application config and one Spring config class, and voilà: the distributed in-memory queue was ready to use in the code just like every other Java BlockingQueue implementation.
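For illustration, using such a queue looks roughly like this (a sketch with invented names, assuming the Hazelcast 3.x API; our real setup wires the instance through a Spring config class as described above):

```java
import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IQueue;

import java.io.Serializable;
import java.util.concurrent.TimeUnit;

public class QueueUsageSketch {

    // invented task payload; it has to be serializable to travel through the cluster
    static class Task implements Serializable {
        final String name;
        Task(String name) { this.name = name; }
    }

    public static void main(String[] args) throws InterruptedException {
        HazelcastInstance hazelcast = Hazelcast.newHazelcastInstance(new Config());

        // the same named queue is visible on every node of the cluster
        IQueue<Task> tasks = hazelcast.getQueue("follow-up-tasks");

        tasks.offer(new Task("send-confirmation-mail")); // enqueue on one node
        Task next = tasks.poll(5, TimeUnit.SECONDS);     // dequeue on any node
        System.out.println("processing " + next.name);

        hazelcast.shutdown();
    }
}
```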
In the first tests we realized how great Hazelcast works. Every queued item was available on all nodes in an instant, and we could shut down and restart nodes at will without losing data. The only thing that was a bit trickier was to get Hazelcast’s network configuration right so the cluster finds its nodes during an A/B deployment without adding nodes that should NOT belong to the cluster.

I will not go into detail on this “easy part” because this blog post should concentrate on the difficulties. All in all we were in awe of Hazelcast’s smoothness at this point.

## The hard part – persist the shit out of it

So far, so good. We already managed to meet 90% of our requirements. The last 10% shouldn’t be that difficult, right? Pfffff, let’s just do it!

We wanted to make the data resilient against the improbable event of an outage of the whole cluster. So the data in the cluster should be backed up in some kind of persistence and be recovered when the cluster reboots. Hazelcast offers an abstract solution for this problem, namely the QueueStore interface. You can implement the interface with every persisting technology that you want, add some configuration, and all queued data will be mirrored into the data store and recovered after an eventual downtime.

```java
public interface QueueStore<T> {

    void store(Long key, T value);

    void storeAll(Map<Long, T> map);

    void delete(Long key);

    void deleteAll(Collection<Long> keys);

    T load(Long key);

    Map<Long, T> loadAll(Collection<Long> keys);

    Set<Long> loadAllKeys();
}
```
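To give an idea of what an implementation looks like, here is a bare-bones sketch that simply mirrors the queue into an in-process map (purely illustrative, it obviously does not survive a restart; the interface package shown is the Hazelcast 3.x one):

```java
import com.hazelcast.core.QueueStore;

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class InMemoryQueueStore implements QueueStore<String> {

    // stands in for whatever real persistence technology is plugged in behind it
    private final Map<Long, String> store = new ConcurrentHashMap<>();

    @Override
    public void store(Long key, String value) { store.put(key, value); }

    @Override
    public void storeAll(Map<Long, String> map) { store.putAll(map); }

    @Override
    public void delete(Long key) { store.remove(key); }

    @Override
    public void deleteAll(Collection<Long> keys) { keys.forEach(store::remove); }

    @Override
    public String load(Long key) { return store.get(key); }

    @Override
    public Map<Long, String> loadAll(Collection<Long> keys) {
        Map<Long, String> result = new HashMap<>();
        keys.forEach(key -> result.put(key, store.get(key)));
        return result;
    }

    @Override
    public Set<Long> loadAllKeys() { return new HashSet<>(store.keySet()); }
}
```

Such an implementation is then plugged into the queue via `QueueStoreConfig.setStoreImplementation(...)`, as shown further below.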
### Difficulty #1: How to persist?

After the initial euphoria it began to dawn on us that implementing the QueueStore interface obviously meant choosing some technology to persist data (d’uh). Unfortunately Hazelcast does not offer some kind of default implementation that you can just roll with if you want to try it out.

Well ok, how about our database? We did not want to do that at this time. We expected it to be slow, and our project has a history of database-managed queues that didn’t work that well.

The next thing that came to mind was the file system of our application servers. This actually seemed like a viable solution, as the queue entries passed to the QueueStore interface are in key-value format and there already are several libraries providing a file-based key-value store.

So the evaluation train departed again and passed several solutions capable of storing key-value pairs in files, like Berkeley DB, Map DB, Banana DB(!?) and some others.

In the end the train stopped at [ChronicleMap](https://github.com/OpenHFT/Chronicle-Map), an off-heap in-memory map that is mirrored to a file and promises consistency and insane speed. The embedded library is developed by a team of professionals and supports file access by multiple JVM instances at the same time, which is crucial for our A/B deployment. Long story short: we implemented the ChronicleMap QueueStore, and the first local tests delivered the desired results: a Hazelcast cluster with a huge amount of queued data got shut down completely and restarted again, and the data was still there!

### Obstacle #2: I am the persistence Master!

The first test on a production-like system with a cluster of multiple VMs seemed promising. After every simulated cluster downtime the data was still there. But looking closely at the files written by ChronicleMap we noticed a strange thing. Only on one node of the cluster did the file size change; on the other nodes the files got created but stayed at the same size of only a few KB. What was the meaning of this? Why were not all nodes backing up their data? And how was it possible for them to recover their data without the file backup?

After some more research we discovered a sentence in the Hazelcast documentation of the **map** persistence that was missing in the documentation of the **queue** persistence. It says:

> NOTE: Data store needs to be a centralized system that is accessible from all Hazelcast members. Persistence to a local file system is not supported.

Aaaahrgs! That explained the observed behavior! The cluster assumes that all nodes access the same data store and determines one node to be some kind of persistence master that writes and reads all data from the store for the whole cluster! Further tests showed that it seems pretty unpredictable which node becomes the persistence master. If the nodes are not restarted in exactly the same order after every deployment, it could happen that a different node is assigned persistence master and does not recover the data from the previous persistence master. The data would be lost – even without a cluster downtime. To prevent this we had to provide a centralized data store!

The situation was not critical, as the persistence implementation had not been merged to master yet – but admittedly we were in some kind of frustration mode at that point, and the first reflex was to centralize the ChronicleMap file so we did not have to change the implementation again. After hard negotiations our ops team grudgingly provided us with a test system of multiple VMs all accessing the same file on an NFS share. As expected it worked, but it didn’t feel right. We decided to run a long-term test with production-like data and to decide afterwards if the solution is good enough to be rolled out on production.

### Stumbling block #3: Configuring the QueueStore

This was only a minor issue, but it added up with the uneasy mood that we had about our solution at that point.
The Hazelcast queue is configured programmatically via a Spring configuration class. During our development we noticed that none of our configuration changes to the queue persistence seemed to have any effect. It turned out that the QueueStore only accepts strings as configuration parameters, which is not obvious at first glance when using a java.util.Properties object to pass the properties, since it accepts Object as type.

```java
// no Strings - does not work
QueueStoreConfig queueStoreConfig = new QueueStoreConfig();
queueStoreConfig.setEnabled(true);
Properties properties = new Properties();
properties.put("binary", true);
properties.put("memory-limit", 0);
properties.put("bulk-load", 4L);
queueStoreConfig.setProperties(properties);
queueStoreConfig.setStoreImplementation(new ChronicleMapQueueStore());

// Strings - does work
QueueStoreConfig queueStoreConfig = new QueueStoreConfig();
queueStoreConfig.setEnabled(true);
queueStoreConfig.setProperty("binary", "false");
queueStoreConfig.setProperty("memory-limit", "0");
queueStoreConfig.setProperty("bulk-load", "4");
queueStoreConfig.setStoreImplementation(new ChronicleMapQueueStore());
```

### Anxiety #4: No transactions – obviously

After testing our solution for a while, more and more scenarios of potential data loss popped into our mind.
We’ve been aware that neither Hazelcast nor ChronicleMap offers some kind of real transactions when writing to the file. Theoretically the persistence master could be killed during a write operation and the file could be left in an inconsistent state. We tested this scenario with a manually corrupted file, and it resulted in the unpleasant situation that the ChronicleMap Spring bean could not be initialized, preventing the creation of the Spring ApplicationContext and consequently stopping the application startup – not good.

To feel safe about our solution we needed a transactional, central data store. Captain Obvious knocked on the door and said “Helloooo? Database?”.

We reconsidered this option again. The database is transactional, it is centralized, and it does not count as an external system because it is so essential for our application that when the database is down, the application is down anyway. It still wouldn’t be a database-managed queue, because the queue still lies in-memory and the queueing mechanism is managed by Hazelcast. The database would just be a backup. Of course performance would be a potential issue, but we were ready to give it a try.

So finally we decided to change the QueueStore implementation to persist to a key-value table in our database.

### WTF #5: It’s a real bug!

Feeling better with this persistence approach, we pushed closer to a production release of the feature. But suddenly we had massive performance difficulties with production-like data. Reading 1000 entries from the queue took several minutes! Was it the fault of the database? Was it really that slow?

After extensive analysis we found out that as a matter of fact it was a bug in the otherwise really robust and stable Hazelcast. When using the drainTo(numberOfEntries) method to get data from the queue, the data should be loaded from the persisted data in bulks of a configurable size, calling the QueueStore.loadAll(listOfEntries) method once for every bulk. Instead loadAll() got called once for the first bulk and then once for every single following item, resulting in almost the same number of database calls as the number of requested items.

I recently opened an [issue](https://github.com/hazelcast/hazelcast/issues/10621) for the bug including a small [demo project](https://github.com/indyarni/hazelcastbugdemo). The Hazelcast team reacted on the same day, promising to fix it. One week later the issue was fixed and the fix will be released in the next version 3.9! Kudos to the Hazelcast developers!

Until we are able to use 3.9 we solved the problem with a temporary workaround, setting the bulk size to the same value as the number of drained entries, resulting in only one bulk loaded at a time and only one database call per drain.
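Sketched out, the workaround amounts to keeping the drain size and the `bulk-load` property in sync (a fragment in the style of the config snippet above; the constant, the entry type and the queue variable are placeholders):

```java
// keep the QueueStore bulk size in sync with the number of drained entries
int drainSize = 1000; // placeholder value

QueueStoreConfig queueStoreConfig = new QueueStoreConfig();
queueStoreConfig.setEnabled(true);
queueStoreConfig.setProperty("bulk-load", String.valueOf(drainSize));

// ... later, when consuming the queue:
List<QueueEntry> batch = new ArrayList<>();
queue.drainTo(batch, drainSize);
```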
### (╯°□°)╯︵ ┻━┻ #6: Going Prod – finally successful?

Having cleared this last issue and experiencing no further problems or data loss on the test system for weeks, we felt confident to merge the persistence solution to master and go live with it. After the release we were relieved to see that it just worked! Seemingly no problems, no data loss, no performance problems – the application became a lot faster and more resilient.

And here comes the “but”: but we sometimes still observe some (5–10) lost items from the in-memory queue after performing a deployment on the cluster. That means that Hazelcast unexpectedly is not always able to synchronize the cluster in time when our deployment performs the A/B switch on one node after another. It is not a critical problem, because the lost items can be manually recovered from the database, but obviously we still intend to fix this issue. The cause is possibly [this issue](https://github.com/hazelcast/hazelcast/issues/5444) that has been fixed in Hazelcast 3.7. Problem is, we rolled out 3.6.3 on production, which is incompatible with 3.7 and newer versions, which means that to update Hazelcast we would need a cluster downtime… AAAAAHRG – the story continues 🙂

## Final words

Those were only the most dominant of the many challenges we encountered while implementing this solution. Others were e.g. problems with Spring transaction handling when Hazelcast internally opened new threads to call the persistence interface, analyzing different causes of data loss, etc., etc.

After this Odyssey we can draw some conclusions:

- Hazelcast is a great tool! It’s fun to work with, the core features work reaaaally well and I would use it again.
- However, the non-core functionalities (like queue persistence) require some effort to get them working in a complex environment.
- When using a new tool you should read the documentation carefully and try to understand how it really works!
- If you really want to understand how Hazelcast works, it’s not enough to read the documentation carefully 😉
# Karlsruher Entwicklertage 2017 – Conference Day

*kuehn, 2017-05-23*

On 22 May I attended the Karlsruher Entwicklertage for the first time. Overall the event was well organized: you got your badge and the obligatory bag of info material quickly and without hassle. There was a large selection of talks, spread across 6 parallel tracks. The individual tracks were organized in part by user groups such as the Java User Group, the .NET User Group and the local OWASP chapter. Thanks to the volunteers for their commitment!

Here are my impressions of the talks I attended:

### Sinn und Nutzen von RESTful-Hypermedia-APIs

In his keynote, Kai Tödter gave an introduction to REST and hypermedia and presented various frameworks that can be used to implement such interfaces in Java and .NET. The talk was very detailed and technical, more a full presentation than a keynote. Although I had already come into contact with hypermedia and Spring HATEOAS at synyx, it was interesting to see some of the fundamentals and architectural considerations.

### Ops for Developers – Monitoring mit Prometheus für Java Entwickler

Despite my previous experience with InfluxDB, Prometheus and Grafana, new talks about monitoring and metrics are always a win for me. After a short introduction, Alexander Schwarz used simple examples to show how to expose metric endpoints in various frameworks so that Prometheus can scrape them. He also explained how to build the corresponding queries in Prometheus and dashboards in Grafana in order to draw conclusions about the load and runtime behaviour of an application.

### Security Requirements im Software Development Lifecycle

Daniel Kefer and René Reuter presented their tool SecurityRat (Security Requirement Automation Tool), which is also recognized and supported as an OWASP project. SecurityRat maps security requirements onto running and new software projects. From a catalogue of requirements, security-relevant tasks and TODOs are suggested based on selected criteria and can automatically be created as JIRA tickets in the corresponding project queue.
The main benefit is the time saved, since in many companies the people responsible for security look after several projects at once. Abstracting away routine work such as creating tickets and compiling checklists takes load off them. They also gave an outlook on their new tool SecurityCat (Security Compliance and Automated Testing), which is meant to run security tests as part of the continuous integration toolchain.

### Security Baselines für Web-Applikationen in der Praxis oder: „Wieviel Sicherheit darf’s denn sein?“

Ingo Hanke pointed out how difficult it is to sell customers a minimum standard of IT security as part of a service. His central argument was to create a sustainable solution and improvement for the customer. Since, in his opinion, the BSI recommendations for basic security are not feasible for many small businesses, he explained which baseline measures he recommends and works out together with his customers. He uses a kind of modular approach to evaluate sources of risk in a company and to offer appropriate countermeasures. In his experience, many companies have neither an idea of nor provisions for IT security and can achieve a large benefit here with little effort. Among the takeaways from his talk was the recommendation to run a two-hour workshop with new customers, with a decision-maker present. He also recommends that the software industry price security measures into its services and, in case of doubt, walk away from low-budget projects.

### Sicher in die Cloud mit Angular 2 und Spring Boot

In a very example-driven talk, Andreas Falk showed the risks and vulnerabilities of modern web applications, using Angular 2 as the frontend and Spring Boot in the backend. Topics included the various interfaces each framework offers and how the implicit protection, e.g. in Angular 2, works against attacks such as cross-site scripting (XSS) and cross-site request forgery (CSRF). The talk was oriented along the OWASP Top 10 project (version 2017 RC1). Andreas vividly explained the implications of the risks listed in the Top 10.

### Löschen? Löschen. Löschen!

Volker Hammer spoke about data protection and the obligations of service providers regarding data retention and destruction. Using TollCollect as an example, he showed how much effort it can take until a standard is developed and recognized as a DIN norm. In a consortium of several large German companies, DIN 66398 “Leitlinie zur Entwicklung eines Löschkonzepts mit Ableitung von Löschfristen für personenbezogene Daten” came into being over the course of 11 years. Besides the description of the norm itself, the insights gained while creating it and defining the deletion concepts were interesting, among them
die Vereinfachung der Datenhaltung und -Sicherung.",[649,12948,12950],{"id":12949},"keynote-software-engineering-the-roots","Keynote: Software Engineering – the roots",[18,12952,12953],{},"In der Abschluss-Keynote sprach Fath Al-Fatish über die Aufwände in der Softwareentwicklung, die zu über 50% in der\nWartung liegen und beklagte die mangelnde Aus- und Weiterbildung der Softwareentwickler. Auf spassige Art erklärte er\ndie Problematik in der Aussensicht der Branche aufgrund von fehlendem Verständniss und Interesse an Nachhaltigkeit. In\nseinem Beitrag verwies er auf Arbeitsweisen und Verbesserungspotential. Innerlich habe ich mich an dieser Stelle leider\ngefragt, was in vielen anderen Betrieben schiefgeht, und habe mich hier bei allen Argumenten und Lösungsvorschlägen\ninnerlich gefreut , da ich alles positive bei synyx wiederfinde.",[18,12955,12956],{},"Insgesamt eine empfehlenswerte Konferenz, nicht mein letzter Besuch!",{"title":48,"searchDepth":86,"depth":86,"links":12958},[12959,12960,12961,12962,12963,12964,12965],{"id":12907,"depth":126,"text":12908},{"id":12914,"depth":126,"text":12915},{"id":12921,"depth":126,"text":12922},{"id":12928,"depth":126,"text":12929},{"id":12935,"depth":126,"text":12936},{"id":12942,"depth":126,"text":12943},{"id":12949,"depth":126,"text":12950},[613,614],"2017-05-23T14:57:28","Am 22.5. habe ich zum ersten Mal die Karlsruher Entwicklertage besucht. Insgesamt war die Veranstaltung gut\\norganisiert, man bekam schnell und unkompliziert seine Badge und die obligatorische Info-Tüte. Es gab eine große\\nAuswahl an verschiedenen Talks, die auf 6 parallele Tracks aufgeteilt waren. Die einzelnen Tracks wurden unter anderem\\nvon einigen Usergroups wie z.B. der Java Usergroup, der .NET-Usergroup und dem lokalen OWASP-Chapter organisiert.\\nDanke an die ehrenamtlichen Helfer für das Engageme","https://synyx.de/blog/karlsruher-entwicklertage-2017-conference-day/",{},"/blog/karlsruher-entwicklertage-2017-conference-day",{"title":12891,"description":12901},"blog/karlsruher-entwicklertage-2017-conference-day",[],"Am 22.5. habe ich zum ersten Mal die Karlsruher Entwicklertage besucht. Insgesamt war die Veranstaltung gut organisiert, man bekam schnell und unkompliziert seine Badge und die obligatorische Info-Tüte. Es gab eine große Auswahl an verschiedenen Talks, die auf 6 parallele Tracks aufgeteilt waren. Die einzelnen Tracks wurden unter anderem von einigen Usergroups wie z.B. der Java Usergroup, der .NET-Usergroup und dem lokalen OWASP-Chapter organisiert. Danke an die ehrenamtlichen Helfer für das Engageme","yL6rqaMobpSi3DAnvXOcesuCqY6XUSrTSNz27zUGzfg",{"id":12978,"title":12979,"author":12980,"body":12982,"category":13160,"date":13161,"description":13162,"extension":617,"link":13163,"meta":13164,"navigation":499,"path":13165,"seo":13166,"slug":12986,"stem":13168,"tags":13169,"teaser":13174,"__hash__":13175},"blog/blog/validating-internal-structure-dependencies-using-intellij-idea.md","Validating internal structure / dependencies using IntelliJ IDEA",[12981],"kannegiesser",{"type":11,"value":12983,"toc":13158},[12984,12987,13013,13039,13042,13056,13067,13073,13091,13097,13100,13107,13113,13116,13127,13133,13136,13142,13150],[14,12985,12979],{"id":12986},"validating-internal-structure-dependencies-using-intellij-idea",[18,12988,12989,12990,4816,12995,13000,13001,13006,13007,13012],{},"There are several different tools to maintain the internal structure of a java application available. 
The tools range\nfrom simple open source software like",[585,12991,12994],{"href":12992,"rel":12993},"https://github.com/clarkware/jdepend",[589],"jdepend",[585,12996,12999],{"href":12997,"rel":12998},"https://web.archive.org/web/20200924121825/http://blog.schauderhaft.de/degraph/",[589],"degraph"," to full fledged\narchitecture tooling like ",[585,13002,13005],{"href":13003,"rel":13004},"http://structure101.com/",[589],"Structure101","\nor ",[585,13008,13011],{"href":13009,"rel":13010},"https://www.hello2morrow.com/products/sonargraph/architect9",[589],"Sonargraph Architect",". All these provide methods to\ndefine the internal structure of an application and validate it somehow.",[18,13014,13015,13016,13021,13022,13027,13028,13033,13034,986],{},"Since we are using ",[585,13017,13020],{"href":13018,"rel":13019},"https://www.jetbrains.com/idea/",[589],"IntelliJ IDEA"," in many of our teams I’d like to show a handy little\nfeature of the IDE that helps in maintaining a structured application:In IDEA you\ncan ",[585,13023,13026],{"href":13024,"rel":13025},"https://www.jetbrains.com/help/idea/2017.1/scopes.html",[589],"define scopes"," and match your code to them. You can then\nrule how they may or may not access each other using\nthe ",[585,13029,13032],{"href":13030,"rel":13031},"https://www.jetbrains.com/help/idea/2017.1/dependency-viewer.html",[589],"Dependency Viewer",". As soon as you do this the\nIDE warns you about illegal package access just as you type. Note that the shown features currently work in the\nfree ",[585,13035,13038],{"href":13036,"rel":13037},"https://www.jetbrains.com/idea/#chooseYourEdition",[589],"IntelliJ IDEA Community Edition",[18,13040,13041],{},"Imagine we have a simple layered application with Java packages representing these layers:",[577,13043,13044,13047,13050,13053],{},[580,13045,13046],{},"api: API-Layer features controllers that are responsible to render a RESTful API to our application",[580,13048,13049],{},"business: our higher level operations/business logic happens here",[580,13051,13052],{},"persistence: this layer is responsible for storing data and providing access to it",[580,13054,13055],{},"domain: In addition we have a domain “layer” where all the layers share their common domain",[18,13057,13058,13059,13062,13063,13066],{},"We can now define scopes for each of these layers. We can do so using ",[573,13060,13061],{},"File -> Settings"," and then select\n",[573,13064,13065],{},"Appearance & Behaviour -> Scopes",". Here we can add new scopes and assign files to it using a pattern or by\nnavigating to the corresponding packages and include/exclude them.",[18,13068,13069],{},[2223,13070],{"alt":13071,"src":13072},"\"Creating Scopes using Settings\"","https://media.synyx.de/uploads//2017/05/1-creating-sopes.png",[18,13074,13075,13076,13079,13080,13083,13084,13087,13088],{},"We then define archuitectural constraints on these scopes using the ",[573,13077,13078],{},"Analyze Dependencies View"," (accessible using the\n",[573,13081,13082],{},"Analyze"," Menu). 
We do so by clicking the ",[573,13085,13086],{},"Edit Rules Icon"," in the ",[573,13089,13090],{},"Dependency-View.",[18,13092,13093],{},[2223,13094],{"alt":13095,"src":13096},"\"Defining rules\"","https://media.synyx.de/uploads//2017/05/2-defining-rules.png",[18,13098,13099],{},"In the example above configured simple constraints for our layered architecture: We only allow access between layers\nfrom an upper layer to a lower layer (API to Business and Business to Persistence) and we dont allow Domain to access\nany other layer.",[18,13101,13102,13103,13106],{},"When we return to the ",[573,13104,13105],{},"Dependency-View"," and re-run the analysis it displays all the violations of our architecture.\nAlso, we get the analysis as soon as we type a new Validation in the Editor.",[18,13108,13109],{},[2223,13110],{"alt":13111,"src":13112},"\"Violations view from dependencies\"","https://media.synyx.de/uploads//2017/05/3-violations-in-dependency-view.png",[18,13114,13115],{},"You can see in the screenshot that we have an illegal access from a class in scope Persistence that accesses something\nin scope Business. We can simply fix this by moving the Initializer to the business-Package and the error disappears.",[18,13117,13118,13119,13122,13123,13126],{},"As an alternative we can also access the information by running the analysis ",[573,13120,13121],{},"Illegal package dependencies"," (e.g. using\nthe ",[573,13124,13125],{},"Analyze – > Run inspection by name",") dialog. From there we can also edit our rules and navigate to the violating\ncode.",[18,13128,13129],{},[2223,13130],{"alt":13131,"src":13132},"\"Violations-View from inspections\"","https://media.synyx.de/uploads//2017/05/4-violations-in-inspections-view.png",[18,13134,13135],{},"If you enable the shared option for scopes IDEA will also write the configuration to .idea/scopes from where you can\nshare them with your team members.",[18,13137,13138],{},[2223,13139],{"alt":13140,"src":13141},"\"Scope definitions in .idea/scopes\"","https://media.synyx.de/uploads//2017/05/5-export.png",[18,13143,13144,13145,986],{},"To test and experiment with scopes yourself you can build on\nmy",[585,13146,13149],{"href":13147,"rel":13148},"https://github.com/marckanneg/idea-scopes-demo",[589],"my demo-project",[18,13151,13152,13153,13157],{},"You can clone it\nfrom ",[585,13154,13155],{"href":13155,"rel":13156},"https://github.com/marckanneg/idea-scopes-demo.git",[589]," and open\nit using your IntelliJ IDEA(File ->Open).",{"title":48,"searchDepth":86,"depth":86,"links":13159},[],[613],"2017-05-16T12:18:36","There are several different tools to maintain the internal structure of a java application available. The tools range\\nfrom simple open source software likejdepend\\nand degraph to full fledged\\narchitecture tooling like Structure101\\nor Sonargraph Architect. All these provide methods to\\ndefine the internal structure of an application and validate it somehow.","https://synyx.de/blog/validating-internal-structure-dependencies-using-intellij-idea/",{},"/blog/validating-internal-structure-dependencies-using-intellij-idea",{"title":12979,"description":13167},"There are several different tools to maintain the internal structure of a java application available. The tools range\nfrom simple open source software likejdepend\nand degraph to full fledged\narchitecture tooling like Structure101\nor Sonargraph Architect. 
All these provide methods to\ndefine the internal structure of an application and validate it somehow.","blog/validating-internal-structure-dependencies-using-intellij-idea",[13170,13171,6884,13172,13173],"idea","layers","software-qualitat","softwarearchitektur","There are several different tools to maintain the internal structure of a java application available. The tools range from simple open source software like jdepend and degraph to full fledged architecture…","nRJYUHMxI944vdsZA69SvSdJ70YYEvHqera1wPQeKgI",{"id":13177,"title":13178,"author":13179,"body":13181,"category":13412,"date":13413,"description":13414,"extension":617,"link":13415,"meta":13416,"navigation":499,"path":13417,"seo":13418,"slug":13185,"stem":13419,"tags":13420,"teaser":13428,"__hash__":13429},"blog/blog/visualising-sensors-and-coffee-machines-with-esp8266-mqtt-influxdb-and-grafana.md","Visualising sensors and coffee machines with ESP8266, MQTT, InfluxDB and Grafana",[13180],"posch",{"type":11,"value":13182,"toc":13410},[13183,13186,13189,13192,13195,13198,13201,13206,13212,13215,13226,13229,13234,13237,13242,13245,13250,13256,13259,13262,13265,13270,13273,13276,13281,13284,13290,13293,13296,13299,13302,13305,13310,13313,13316,13330,13333,13336,13341,13344,13349,13352,13357,13360,13363,13366,13369,13372,13389,13394,13402],[14,13184,13178],{"id":13185},"visualising-sensors-and-coffee-machines-with-esp8266-mqtt-influxdb-and-grafana",[18,13187,13188],{},"A few months ago, here at the synyx office we started out with a simple idea: hook up a couple of sensors to an ESP8266\nmodule (or twenty) and have it write its data somewhere for visualisation purposes. Then we got creative.",[18,13190,13191],{},"Our current setup consists out of a number of ESP8266 (currently NodeMCU) boards, featuring humidity (DHT-22, BME280)\nas well as CO2 (MH-Z14) sensors which publish their data over MQTT. In addition, we also hooked the Jura coffee\nmachines in the office up to a couple of ESP8266 modules via their serial interface to read out statistics.",[18,13193,13194],{},"All of this is managed centrally from a command & control server (C&C) which communicates over the MQTT broker (\nMosquitto), with the nodes actively announcing themselves when they come online and receiving their configuration from\nthe C&C. ESP8266 firmware updates are provided as Over The Air (OTA) updates via HTTP.",[18,13196,13197],{},"A custom MQTT-to-Influx service (based on libmosquitto and POCO) is used to write published sensor and coffee machine\nMQTT events into the InfluxDB instance using its HTTP line protocol. 
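The MQTT-to-Influx service itself is written in C++ on top of libmosquitto and POCO, so the following is only a rough illustration of what a single write via InfluxDB's HTTP line protocol amounts to, sketched in Java for brevity. The host, database name, measurement and tag names are assumptions for the example and do not reflect the real service:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class InfluxLineProtocolWriteExample {

    public static void main(String[] args) throws Exception {
        // One record in line protocol: measurement,tag=value field=value
        String line = "temperature,room=office,node=esp-01 value=21.5";

        // Assumed local InfluxDB 1.x instance with a database called "sensors".
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8086/write?db=sensors"))
                .header("Content-Type", "text/plain; charset=utf-8")
                .POST(HttpRequest.BodyPublishers.ofString(line))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // InfluxDB answers 204 No Content if the point was written successfully.
        System.out.println("HTTP status: " + response.statusCode());
    }
}
```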
A Grafana instance then uses these time series to\nshow current temperatures, humidity and CO2 levels, as well as coffee use on a single dashboard.",[18,13199,13200],{},"The resulting dashboard looks like this:",[18,13202,13203],{},[2223,13204],{"alt":48,"src":13205},"https://media.synyx.de/uploads//2017/03/iot_grafana_dashboard.png",[18,13207,13208],{},[585,13209,13210],{"href":13210,"rel":13211},"https://snapshot.raintank.io/dashboard/snapshot/Ept58LQH5U8sRSW7LP9hl17Cajy0T7i4",[589],[18,13213,13214],{},"It shows the data from a total of four ESP8266-based nodes:",[577,13216,13217,13220,13223],{},[580,13218,13219],{},"One with a single DHT-22 temperature/humidity sensor.",[580,13221,13222],{},"One with a DHT-22 and MH-Z14 CO2 sensor.",[580,13224,13225],{},"Two connected to a single coffee machine each.",[18,13227,13228],{},"Although the coffee machines in question can produce more than just coffee and espresso, we deliberately limited\nourselves to these two counters so that we would not have to set up a separate dashboard (or two) for just the coffee\nmachines to display the counter for the dozens of products they offer 🙂",[18,13230,13231],{},[27,13232,13233],{},"Infrastructure",[18,13235,13236],{},"Visualised the infrastructure we created looks like this:",[18,13238,13239],{},[2223,13240],{"alt":48,"src":13241},"https://media.synyx.de/uploads//2017/03/iot_infra.png",[18,13243,13244],{},"Central to everything is the Mosquitto MQTT broker. It facilitates communication between the ESP8266 nodes, C&C and\nInfluxDB.",[18,13246,13247],{},[27,13248,13249],{},"The ESP8266 nodes",[18,13251,13252,13253,13255],{},"For the firmware of the ESP8266 (ESP-12E-based NodeMCU) boards we use C++ and the Sming ",[53,13254,2546],{}," framework. The latter\nis compatible with Arduino libraries, but allows one to use a callback-based system instead of the loop-based system\nof Arduino, in addition to allowing one to use the full C++ language instead of being limited to the Arduino C\ndialect.",[18,13257,13258],{},"The firmware is identical across all nodes, using a modular (class-based) system to allow each module (DHT, CO2, Jura,\nJura Terminal) to be enabled, disabled and configured from the C&C using MQTT messages. The current firmware image is\njust a tad over 264 kB in size, easily fitting within the 1 MB slot allocated by the rboot boot manager.",[18,13260,13261],{},"As we use ESP-12E ESP8266 modules, we have 4 MB of Flash, split up into two slots for firmware (one active, one as\nbackup or OTA update target) and 1 MB of Flash for storage (using the Spiffs filesystem) for each slot. At this point we\ndo however not store any data locally on the nodes.",[18,13263,13264],{},"With the use of the rboot boot manager, we also gain easy access to HTTP-based OTA updates. An MQTT message from C&C\ntriggers the process, telling Sming’s rboot HTTP update class to fetch the firmware image from the HTTP server, write it\nto the other firmware image slot and boot from it.",[18,13266,13267],{},[27,13268,13269],{},"Sensors",[18,13271,13272],{},"We currently use the DHT-22 sensors for humidity and temperature using the Sming-provided DHT library, but will likely\nswitch to the much more accurate BME280 (Bosch-manufactured) sensors. These also provide air pressure and take up less\nspace than a single DHT-22 sensor.",[18,13274,13275],{},"The MH-Z14 CO2 sensor has an analogue output, a PWM output and a UART (bi-directional). 
Of these we use the UART (\nconnected to UART0 on the ESP-12E) interface primarily for its ease of use. After querying the sensor, we get a\nresponse from which we can easily calculate the current CO2 level (in parts per million).",[18,13277,13278],{},[27,13279,13280],{},"Coffee machine",[18,13282,13283],{},"At the office we have multiple Jura coffee machines. Being higher-end machines they come with a DE-9f serial connector\non the back which provides a TTL (5V) level serial interface. Using a logic level shifter (bi-directional), we shift\nthe voltage to the 3.3V UART of the ESP-12E.",[18,13285,13286,13287,13289],{},"We put a NodeMCU and logic level shifter into a small enclosure with DE-9f connector, and connected it via a regular\nserial cable (1:1) to the coffee machine. Using available sources ",[53,13288,2409],{}," we knew the pin-out of the DE-9f connector on\nthe machine, as well as the protocol it speaks. The resulting hardware is small enough to be tucked away behind the\nmachine.",[18,13291,13292],{},"A small complication we found is that none of the coffee machines we have here (Xs9, Xs90, XJ9, X3) seem to enable\ntheir UARTs when in standby mode (all pins are 0V), which makes it impossible for us to tell the machine to wake up out\nof standby. This is however not a problem for further functionality.",[18,13294,13295],{},"The +5V from the coffee machine is used to power the ESP-12E when it comes out of standby (power button pressed),\nafter which the node requests its configuration from the C&C server and starts querying the coffee machine for the\ncontents of its EEPROM storage, to read out counters for coffee use.",[18,13297,13298],{},"For this we use the ‘RT:0000’ command, which instructs the system to return the first row of values in the EEPROM (\noffset 0). This command is encoded using the Jura protocol ‘standard’ before it’s sent. This involves post-fixing CR\nand LF and taking each bit of the bytes we wish to send, putting them at position 2 or 5 of an 0xFF masked byte,\nessentially padding out the payload bytes.",[18,13300,13301],{},"The response has to be decoded in the reverse fashion: taking bits 2 and 5 out of each byte we receive and assembling\none byte out of four received. This response also ends with a CR and LF.",[18,13303,13304],{},"Decoded, the response looks like this:",[18,13306,13307],{},[50,13308,13309],{},"rt:008D0001054E0000000F00390000009300290000162500000000000000000000",[18,13311,13312],{},"We get a lower-case confirmation of the command we sent, followed by the value. In this string each two bytes (four hex\nnumbers) form a counter for a product, meaning in theory after 0xFFFF (65,535 decimal) cups of a product we would see an\noverflow. If we look at the earlier Grafana dashboard, we can however see that the Xs9 is still at fewer than 9,000 cups\nof coffee (the most popular product), despite having been in use for years. A 16-bit counter is therefore likely\nsufficient.",[18,13314,13315],{},"So far we confirmed these counters across our coffee machines:",[12474,13317,13318,13321,13324,13327],{},[580,13319,13320],{},"espresso",[580,13322,13323],{},"2x espresso",[580,13325,13326],{},"coffee",[580,13328,13329],{},"2x coffee",[18,13331,13332],{},"These counters are all in sequential order in the EEPROM. 
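To make the bit shuffling above a little more tangible, here is a minimal Java sketch of the described encoding and decoding. The exact bit order (lowest payload bits first) is an assumption derived from the description and the community protocol documentation referenced at the end of this post, so treat it as illustrative rather than authoritative:

```java
public class JuraProtocolCodec {

    // Spread one payload byte over four wire bytes: two payload bits per wire
    // byte, stored at bit positions 2 and 5 of an otherwise all-ones byte.
    static byte[] encodeByte(byte payload) {
        byte[] out = new byte[4];
        for (int i = 0; i < 4; i++) {
            int low = (payload >> (2 * i)) & 0x01;       // goes to bit 2
            int high = (payload >> (2 * i + 1)) & 0x01;  // goes to bit 5
            out[i] = (byte) ((0xFF & ~((1 << 2) | (1 << 5))) | (low << 2) | (high << 5));
        }
        return out;
    }

    // Reassemble one payload byte by extracting bits 2 and 5 of each of the
    // four received wire bytes.
    static byte decodeByte(byte[] wire) {
        int result = 0;
        for (int i = 0; i < 4; i++) {
            result |= ((wire[i] >> 2) & 0x01) << (2 * i);
            result |= ((wire[i] >> 5) & 0x01) << (2 * i + 1);
        }
        return (byte) result;
    }

    public static void main(String[] args) {
        byte original = 'R';
        byte decoded = decodeByte(encodeByte(original));
        System.out.printf("original=%02X decoded=%02X%n", original, decoded);

        // A counter in the decoded "rt:..." answer is two bytes, i.e. four hex digits.
        String decodedAnswer = "rt:008D0001054E0000000F00390000009300290000162500000000000000000000";
        int firstCounter = Integer.parseInt(decodedAnswer.substring(3, 7), 16);
        System.out.println("first counter: " + firstCounter);
    }
}
```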
Other counters follow these four, but since not each machine\nhas the same products, the offset of their counter might be slightly different or be absent.",[18,13334,13335],{},"After reading out the relevant counters using appropriate offsets, they are published via MQTT on their own topics, to\nbe written by the MQTT-to-Influx service into the InfluxDB.",[18,13337,13338],{},[27,13339,13340],{},"LEDs",[18,13342,13343],{},"We’re looking at connecting various types of LED lighting to centrally control them (colour, intensity, etc.), including\nWS2812b RGB LEDs and kin.",[18,13345,13346],{},[27,13347,13348],{},"Grafana",[18,13350,13351],{},"Since we use Grafana elsewhere in the company already for data visualisation, we used it for this project as well. When\ntrying to configure things in a slightly more complex fashion (summing series, tables with just labels and associated\ncurrent value, etc.), we did however quickly run into limitations.",[18,13353,13354],{},[27,13355,13356],{},"Next steps",[18,13358,13359],{},"The coming time we will mostly take the existing system further: adding more sensors (light level, noise), as well as\nLEDs. We wish to add the ability to carry one’s favourite coffees between machines using NFC tags or similar. Maybe even\nadd the coffee-carrying robots many of us seem to keep mentioning.",[18,13361,13362],{},"For the sensor nodes we are working on creating (acrylic) enclosures which should be both practical and attractive\nenough to be put around the office.",[18,13364,13365],{},"The C&C server and associated UI can be made more functional and extensive, with further configuration options added to\nthe ESP8266 firmware’s modules.",[18,13367,13368],{},"The use of Grafana is at this point slightly controversial due to the limitations we found. We may look around for a\nmore scriptable alternative if these limitations prove to be insurmountable.",[18,13370,13371],{},"Beyond all of this the main goal of the project remains to improve comfort and fun levels for everyone 🙂",[18,13373,13374,8713,13377,8713,13380,8713,13383,8713,13386],{},[2223,13375],{"alt":48,"src":13376},"https://media.synyx.de/uploads//2017/03/DSC02242_04.jpg",[2223,13378],{"alt":48,"src":13379},"https://media.synyx.de/uploads//2017/03/DSC02245_02.jpg",[2223,13381],{"alt":48,"src":13382},"https://media.synyx.de/uploads//2017/03/DSC02244.jpg",[2223,13384],{"alt":48,"src":13385},"https://media.synyx.de/uploads//2017/03/DSC02248_02.jpg",[2223,13387],{"alt":48,"src":13388},"https://media.synyx.de/uploads//2017/03/DSC02243.jpg",[18,13390,13391],{},[27,13392,13393],{},"References",[18,13395,13396,8713,13398],{},[53,13397,2546],{},[585,13399,13400],{"href":13400,"rel":13401},"https://github.com/SmingHub/Sming",[589],[18,13403,13404,8713,13406],{},[53,13405,2409],{},[585,13407,13408],{"href":13408,"rel":13409},"http://protocoljura.wiki-site.com/index.php/Hauptseite",[589],{"title":48,"searchDepth":86,"depth":86,"links":13411},[],[613],"2017-03-23T15:18:41","A few months ago, here at the synyx office we started out with a simple idea: hook up a couple of sensors to an ESP8266\\nmodule (or twenty) and have it write its data somewhere for visualisation purposes. 
Then we got creative.","https://synyx.de/blog/visualising-sensors-and-coffee-machines-with-esp8266-mqtt-influxdb-and-grafana/",{},"/blog/visualising-sensors-and-coffee-machines-with-esp8266-mqtt-influxdb-and-grafana",{"title":13178,"description":13188},"blog/visualising-sensors-and-coffee-machines-with-esp8266-mqtt-influxdb-and-grafana",[13421,13422,13423,13424,13425,13426,13427],"c","esp8266","grafana","influxdb","mqtt","nodemcu","sming","A few months ago, here at the synyx office we started out with a simple idea: hook up a couple of sensors to an ESP8266 module (or twenty) and have…","8lgpb9e8mFlDVD9UIjRM-_4nIl-Ac-bgPTlLtBkpy4A",{"id":13431,"title":13432,"author":13433,"body":13436,"category":13633,"date":13634,"description":13635,"extension":617,"link":13636,"meta":13637,"navigation":499,"path":13638,"seo":13639,"slug":13440,"stem":13641,"tags":13642,"teaser":13646,"__hash__":13647},"blog/blog/axon-3-event-replaying.md","Axon 3: Event Replaying",[13434,13435],"messner","thieme",{"type":11,"value":13437,"toc":13619},[13438,13441,13455,13459,13466,13469,13473,13476,13480,13483,13487,13494,13498,13501,13509,13514,13517,13526,13536,13570,13573,13580,13589,13593,13596,13600,13603,13607,13610,13612,13614,13617],[14,13439,13432],{"id":13440},"axon-3-event-replaying",[18,13442,13443,13448,13449,13454],{},[585,13444,13447],{"href":13445,"rel":13446},"http://www.axonframework.org/",[589],"Axon"," is a lightweight framework that supports the implemenation of CQRS patterns by\nproviding commonly used building blocks. One of those patterns is an event sourced application architecture. Even though\nEvent Sourcing and CQRS are orthogonal concepts they fit together very well and are often used together. Event sourcing\nin an ",[585,13450,13453],{"href":13451,"rel":13452},"https://msdn.microsoft.com/en-us/library/jj591577.aspx",[589],"ES/CQRS"," architecture means that all changes to the\napplication state are done via domain events and the current state can always be rebuilt from the series of events\navailable in a persistent event store. In addition to the event store there might also be one or more read models, for\nexample to achieve opimtized query performance. No matter if there are read models or not, the event store is considered\nthe single source of truth.",[2207,13456,13458],{"id":13457},"event-replaying-what-and-why","Event Replaying: what and why?",[18,13460,13461,13462,13465],{},"Besides rebuilding current application state from the stored events we can employ a technique called ",[27,13463,13464],{},"Event Replaying","\nto achieve several other goals. For this purpose we need a mechanism to read all events from the event store and send\nthem to a set of components which are interested in handling them. This implies selecting and registering those\ncomponents.",[18,13467,13468],{},"Some use cases where event replaying can be applied are: generating new read models, removing inconsistencies by\nrebuilding an existing read model, or for analysis and debugging purposes.",[649,13470,13472],{"id":13471},"adding-new-read-models","Adding new read models",[18,13474,13475],{},"Suppose you have a library managemenent application where you can track and add meta data about your books. Sometime you\nmight want to switch from your table based search implementation to Elasticsearch. 
This can be done with event\nreplaying: all you have to do is to implement one or more event handlers that are responsible for extracting appropriate\ninformation from the domain events and inserting them into the Elasticsearch index.",[649,13477,13479],{"id":13478},"removing-inconsistencies-from-existing-read-models","Removing inconsistencies from existing read models",[18,13481,13482],{},"As software developers we may have to deal with bugs in our software even though we try hard to avoid them.\nNevertheless, bugs are inevitable. Imagine there is a bug in one of the event handling components that are supposed to\nrebuild the aforementioned Elasticsearch index. In such a situation, first the bug must be fixed (in code) and then all\ndomain events simply have to be replayed to the now well behaving event handler again.",[649,13484,13486],{"id":13485},"debugging-purposes","Debugging purposes",[18,13488,13489,13490,13493],{},"Sometimes you want to know exactly ",[573,13491,13492],{},"when"," an inconsistent application state has been introduced. One approach to achieve\nthis could be to replay all events up to a certain point in time and examine the corresponding application state.",[2207,13495,13497],{"id":13496},"how","How?",[18,13499,13500],{},"Introducing event replaying in an application that already uses the Axon 3 framework is fairly easy. Axon provides all\nbuilding blocks needed to achieve this, in particular there are event handlers and event processors. Event handlers\nimplement all business logic, whereas event processors are responsible for taking care of the technical aspects of event\nprocessing.",[18,13502,13503,13504,4101],{},"There are two types of event processors: Subscribing Event Processors and Tracking Event Processors. From\nthe ",[585,13505,13508],{"href":13506,"rel":13507},"https://docs.axonframework.org/v/3.0/part3/event-processing.html",[589],"Axon documentation",[11259,13510,13511],{},[18,13512,13513],{},"The Subscribing Event Processors subscribe themselves to a source of Events and are invoked by the thread managed by\nthe publishing mechanism. Tracking Event Processors, on the other hand, pull their messages from a source using a\nthread\nthat it manages itself.",[18,13515,13516],{},"It’s the tracking event processors that provide event replaying capabilities. They’re keeping track of which events have\nalready been processed by means of storing a token. So let’s configure Axon to use tracking processors (instead of the\ndefault subscribing processors):",[43,13518,13520],{"className":3792,"code":13519,"language":3794,"meta":48,"style":48},"eventHandlingConfiguration.usingTrackingProcessors();\n",[50,13521,13522],{"__ignoreMap":48},[53,13523,13524],{"class":55,"line":56},[53,13525,13519],{},[18,13527,13528,13529,13532,13533,4101],{},"This will automatically create tracking event processors for your event handlers. Axon uses a token store to determine\nwhether there are events that need to be processed. This token store will be checked regulary. If you’re using the Axon\nJPA provider there is already an entity class ",[50,13530,13531],{},"TokenEntry"," available. You have to tell your JPA provider where it is\nlocated. 
In Spring Boot this can be done with ",[50,13534,13535],{},"@EntityScan",[43,13537,13539],{"className":3792,"code":13538,"language":3794,"meta":48,"style":48}," @EntityScan(\n basePackages = {\n ...,\n \"org.axonframework.eventhandling.tokenstore.jpa\"\n }\n )\n",[50,13540,13541,13546,13551,13556,13561,13565],{"__ignoreMap":48},[53,13542,13543],{"class":55,"line":56},[53,13544,13545],{}," @EntityScan(\n",[53,13547,13548],{"class":55,"line":86},[53,13549,13550],{}," basePackages = {\n",[53,13552,13553],{"class":55,"line":126},[53,13554,13555],{}," ...,\n",[53,13557,13558],{"class":55,"line":163},[53,13559,13560],{}," \"org.axonframework.eventhandling.tokenstore.jpa\"\n",[53,13562,13563],{"class":55,"line":186},[53,13564,3242],{},[53,13566,13567],{"class":55,"line":221},[53,13568,13569],{}," )\n",[18,13571,13572],{},"This is everything needed to configure your application to be able to do event replaying.",[18,13574,13575,13576,13579],{},"In order to trigger an event replay in production you just need to delete the tracking tokens associated with an event\nprocessor from the",[50,13577,13578],{},"token_entry"," table. The corresponding event processor then pulls all events from the event store.",[18,13581,13582,13583,13588],{},"This is a really simple configuration. It’s also possible to configure seperate tracking event processors for different\nreplaying use cases. See\nthe ",[585,13584,13587],{"href":13585,"rel":13586},"http://www.axonframework.org/apidocs/3.0/org/axonframework/config/EventHandlingConfiguration.html",[589],"EventHandlingConfiguration","\nclass for more detail.",[2207,13590,13592],{"id":13591},"common-pitfalls","Common pitfalls",[18,13594,13595],{},"Event replaying is a very useful technique but you have to be aware of some common pitfalls.",[649,13597,13599],{"id":13598},"replaying-events-to-external-services","Replaying events to external services",[18,13601,13602],{},"Make sure that replayed events never get processed accidentally by the wrong event handlers. For example events might be\ntransformed into messages and published to a messaging middleware, and eventually consumed by external systems. In this\ncase you want to exclude the corresponding event handlers from event replaying because messages should only be published\nonce.",[649,13604,13606],{"id":13605},"eventual-consistency","Eventual Consistency",[18,13608,13609],{},"If you’ve used the default subscribing event processors and switch to tracking event processors remember that they are\npulling for events in a thread managed on their own. If your user interface relies on everything being handled in one\nthread, the user interface will break now. In this case, consider using subscribing event processors for\nbusiness-as-usual actions and only use tracking ones if you intent to trigger an event replay. This requires some more\nelaborate configuration for when event replaying is executed. Another option could be to let your user interface pull\nfor updates.",[649,13611],{"id":48},[2207,13613,12384],{"id":12383},[18,13615,13616],{},"Event replaying in Axon 3 is supported by tracking event processors: they keep track of the last position in the event\nlog. 
In order to replay all events all you have to do is resetting the corresponding tracking token.",[607,13618,989],{},{"title":48,"searchDepth":86,"depth":86,"links":13620},[13621,13626,13627,13632],{"id":13457,"depth":86,"text":13458,"children":13622},[13623,13624,13625],{"id":13471,"depth":126,"text":13472},{"id":13478,"depth":126,"text":13479},{"id":13485,"depth":126,"text":13486},{"id":13496,"depth":86,"text":13497},{"id":13591,"depth":86,"text":13592,"children":13628},[13629,13630,13631],{"id":13598,"depth":126,"text":13599},{"id":13605,"depth":126,"text":13606},{"id":48,"depth":126,"text":48},{"id":12383,"depth":86,"text":12384},[613],"2017-03-01T08:14:51","Axon is a lightweight framework that supports the implemenation of CQRS patterns by\\nproviding commonly used building blocks. One of those patterns is an event sourced application architecture. Even though\\nEvent Sourcing and CQRS are orthogonal concepts they fit together very well and are often used together. Event sourcing\\nin an ES/CQRS architecture means that all changes to the\\napplication state are done via domain events and the current state can always be rebuilt from the series of events\\navailable in a persistent event store. In addition to the event store there might also be one or more read models, for\\nexample to achieve opimtized query performance. No matter if there are read models or not, the event store is considered\\nthe single source of truth.","https://synyx.de/blog/axon-3-event-replaying/",{},"/blog/axon-3-event-replaying",{"title":13432,"description":13640},"Axon is a lightweight framework that supports the implemenation of CQRS patterns by\nproviding commonly used building blocks. One of those patterns is an event sourced application architecture. Even though\nEvent Sourcing and CQRS are orthogonal concepts they fit together very well and are often used together. Event sourcing\nin an ES/CQRS architecture means that all changes to the\napplication state are done via domain events and the current state can always be rebuilt from the series of events\navailable in a persistent event store. In addition to the event store there might also be one or more read models, for\nexample to achieve opimtized query performance. No matter if there are read models or not, the event store is considered\nthe single source of truth.","blog/axon-3-event-replaying",[13643,13644,13645,290],"axon","cqrs","event-sourcing","Axon is a lightweight framework that supports the implemenation of CQRS patterns by providing commonly used building blocks. One of those patterns is an event sourced application architecture. 
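To make "resetting the corresponding tracking token" concrete, here is a minimal sketch that removes the tokens of one processor directly from the token_entry table via Spring's JdbcTemplate. The snake_case column name processor_name is an assumption based on the default JPA mapping and may differ in your schema, and the processor should not be running while its tokens are deleted; this is a hedged illustration, not an official Axon API:

```java
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;

@Component
public class ReplayTrigger {

    private final JdbcTemplate jdbcTemplate;

    public ReplayTrigger(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    // Deleting the tokens makes the tracking event processor start over at the
    // beginning of the event store and re-process every event.
    public void replay(String processorName) {
        jdbcTemplate.update(
                "DELETE FROM token_entry WHERE processor_name = ?",
                processorName);
    }
}
```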
Even…","EsUmpQAdRuDUdhFKW4wvztcDCze9dI5vCmw1_n2XNcA",{"id":13649,"title":13650,"author":13651,"body":13653,"category":13965,"date":13966,"description":13967,"extension":617,"link":13968,"meta":13969,"navigation":499,"path":13970,"seo":13971,"slug":13657,"stem":13973,"tags":13974,"teaser":13978,"__hash__":13979},"blog/blog/welcome-to-spring-ldap-with-ssl-the-entrance-is-free.md","Welcome to Spring LDAP with SSL: The entrance is free",[13652],"schneider",{"type":11,"value":13654,"toc":13963},[13655,13658,13673,13698,13701,13708,13715,13718,13729,13738,13747,13750,13757,13760,13771,13777,13784,13829,13832,13865,13876,13883,13891,13919,13922,13929,13932,13952,13961],[14,13656,13650],{"id":13657},"welcome-to-spring-ldap-with-ssl-the-entrance-is-free",[18,13659,13660,13661,13666,13667,13672],{},"Some time ago we started to create our own intranet called OpenCoffee, which is\na ",[585,13662,13665],{"href":13663,"rel":13664},"https://en.wikipedia.org/wiki/Microservices",[589],"microservice"," landscape based on our own Spring Boot Starters, a service\ndiscovery and an ",[585,13668,13671],{"href":13669,"rel":13670},"https://oauth.net/2/",[589],"OAuth2 service",". The vision is to create a system with a very easy integration\nof new applications by just adding a few Spring Boot Starters and starting to code the specific functionality of the new\nservice.",[18,13674,13675,13676,13681,13682,13686,13687,13692,13693,986],{},"We passed that stage of the developer friendly integration, started to look at the security and vulnerability of the\nsystem where we tried to make things more secure. At some point we talked about SSL encryption of the traffic between\nthe OAuth2 service and our ",[585,13677,13680],{"href":13678,"rel":13679},"https://de.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol",[589],"LDAP",". We used Spring\nLDAP for the communication between these two services and started reading\nthe ",[585,13683,10174],{"href":13684,"rel":13685},"http://docs.spring.io/spring-ldap/docs/1.3.x/reference/html/configuration.html",[589]," on how to get this\ndone. We achieved this quite easily by replacing\nthe ",[585,13688,13691],{"href":13689,"rel":13690},"https://github.com/spring-projects/spring-ldap/blob/master/core/src/main/java/org/springframework/ldap/core/support/DirContextAuthenticationStrategy.java",[589],"authentication strategy","\nin the LDAP context source with the\nprovided ",[585,13694,13697],{"href":13695,"rel":13696},"https://github.com/spring-projects/spring-ldap/blob/master/core/src/main/java/org/springframework/ldap/core/support/DefaultTlsDirContextAuthenticationStrategy.java",[589],"tls authentication strategy",[18,13699,13700],{},"Spring is doing a good job of an easy configuration, thanks guys.",[11259,13702,13703],{},[18,13704,13705],{},[573,13706,13707],{},"We activated the TLS authentication strategy and it was not safe anymore",[18,13709,13710,13711,13714],{},"At the beginning we were happy but then we started our tests and there it happened. Our tests failed because it was very\neasy to get access via our OAuth2 service with an activated TLS authentication strategy. You just have to know a\nusername that is defined in our LDAP and it will grant access with every password ",[573,13712,13713],{},"you"," want to use. That is not even\nsecurity through obscurity. Just a friendly bouncer.",[18,13716,13717],{},"Not so good for us so we started to dig into the code, maybe it was a mistake on our side. 
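For reference, switching the context source over to the TLS strategy is only a few lines; the following is a minimal sketch with hypothetical connection details, not the actual configuration of our OAuth2 service:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.ldap.core.LdapTemplate;
import org.springframework.ldap.core.support.DefaultTlsDirContextAuthenticationStrategy;
import org.springframework.ldap.core.support.LdapContextSource;

@Configuration
public class LdapConfig {

    @Bean
    public LdapContextSource contextSource() {
        LdapContextSource contextSource = new LdapContextSource();
        // Hypothetical connection details for the sketch.
        contextSource.setUrl("ldap://ldap.example.org:389");
        contextSource.setBase("dc=example,dc=org");
        // Upgrade the connection via StartTLS before the bind happens.
        contextSource.setAuthenticationStrategy(new DefaultTlsDirContextAuthenticationStrategy());
        return contextSource;
    }

    @Bean
    public LdapTemplate ldapTemplate(LdapContextSource contextSource) {
        return new LdapTemplate(contextSource);
    }
}
```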
After a long debugging\nsession with two instances of our OAuth2 service, with and without the TLS authentication strategy, we found out that\nthe last bind to the LDAP server, which should contain the user credentials, was an anonymous bind.",[18,13719,13720,13721,13724,13725,13728],{},"This anonymous bind returned with the message “",[573,13722,13723],{},"Yeah OAuth2 server, you can let the anonymous user log in, he is\nsecure","“. Of course that last anonymous bind was ",[573,13726,13727],{},"not ok",". Was it our fault or does Spring LDAP have a bug with\nanonymous binds and the TLS authentication strategy?",[18,13730,13731,13732,13737],{},"We started to isolate the bug and created\na ",[585,13733,13736],{"href":13734,"rel":13735},"https://github.com/derTobsch/tls-bug-demo/tree/master/docker",[589],"docker container"," with SSL encryption, activated\nanonymous binds and allowed anonymous user search through the directory tree.",[18,13739,13740,13741,13746],{},"After some time we had our docker container together with all the configuration we needed to reproduce the bug,\nhopefully. A small ",[585,13742,13745],{"href":13743,"rel":13744},"https://github.com/derTobsch/tls-bug-demo",[589],"Spring Boot project"," with Spring LDAP was easy to set up\nand we configured everything. We logged in with correct credentials and everything was fine.",[18,13748,13749],{},"Then after all this debugging and a good session with my colleagues from synyx, thanks to all of you – that was a really\nnice Friday with you – we tried to login with the best incorrect credentials “user” and the password “wurstsalat”. Maybe\nyou know what happened, we were logged in and had a good feeling from chasing, finding and isolating this bug.",[11259,13751,13752],{},[18,13753,13754],{},[573,13755,13756],{},"Was it only the authentication strategy that caused that bug?",[18,13758,13759],{},"It was not only the authentication strategy. 
It was a combination of three things:",[577,13761,13762,13765,13768],{},[580,13763,13764],{},"‘Groupsearch’ is activated and used",[580,13766,13767],{},"Anonymous binds are activated and allowed to search through the directory tree",[580,13769,13770],{},"‘DefaultTlsDirContextAuthenticationStrategy’ is used as authentication strategy",[18,13772,13773,13774,986],{},"When you know what is happening, you can easily see the problems with the ",[50,13775,13776],{},"DefaultTlsDirContextAuthenticationStrategy",[18,13778,13779,13780,13783],{},"At the beginning of the communication a TLS connection is started and a lookup of the user ",[50,13781,13782],{},"cn=user"," is made:",[43,13785,13789],{"className":13786,"code":13787,"language":13788,"meta":48,"style":48},"language-plaintext shiki shiki-themes github-light github-dark","...\n588241ed conn=1011 fd=16 TLS established tls_ssf=128 ssf=128\n588241ed conn=1011 op=1 BIND dn=\"\" method=128\n588241ed conn=1011 op=1 RESULT tag=97 err=0 text=\n588241ed conn=1011 op=2 SRCH base=\"ou=People,dc=example,dc=org\" scope=2 deref=3 filter=\"(\u003Cstrong>cn=user\u003C/strong>)\"\n588241ed \u003C = bdb_equality_candidates: (cn) not indexed\n588241ed conn=1011 op=2 SEARCH RESULT tag=101 err=0 nentries=1 text=\n...\n","plaintext",[50,13790,13791,13795,13800,13805,13810,13815,13820,13825],{"__ignoreMap":48},[53,13792,13793],{"class":55,"line":56},[53,13794,6348],{},[53,13796,13797],{"class":55,"line":86},[53,13798,13799],{},"588241ed conn=1011 fd=16 TLS established tls_ssf=128 ssf=128\n",[53,13801,13802],{"class":55,"line":126},[53,13803,13804],{},"588241ed conn=1011 op=1 BIND dn=\"\" method=128\n",[53,13806,13807],{"class":55,"line":163},[53,13808,13809],{},"588241ed conn=1011 op=1 RESULT tag=97 err=0 text=\n",[53,13811,13812],{"class":55,"line":186},[53,13813,13814],{},"588241ed conn=1011 op=2 SRCH base=\"ou=People,dc=example,dc=org\" scope=2 deref=3 filter=\"(\u003Cstrong>cn=user\u003C/strong>)\"\n",[53,13816,13817],{"class":55,"line":221},[53,13818,13819],{},"588241ed \u003C = bdb_equality_candidates: (cn) not indexed\n",[53,13821,13822],{"class":55,"line":242},[53,13823,13824],{},"588241ed conn=1011 op=2 SEARCH RESULT tag=101 err=0 nentries=1 text=\n",[53,13826,13827],{"class":55,"line":273},[53,13828,6348],{},[18,13830,13831],{},"then the second bind is made, which should contain the credentials but",[43,13833,13835],{"className":13786,"code":13834,"language":13788,"meta":48,"style":48},"...\n588241ee conn=1013 fd=16 TLS established tls_ssf=128 ssf=128\n588241ee conn=1013 op=1 BIND \u003Cstrong>dn=\"\"\u003C/strong> method=128\n588241ee conn=1013 op=1 RESULT tag=97 err=0 text=\n588241ee conn=1013 op=2 SRCH base=\"ou=Groups,dc=example,dc=org\" scope=1 deref=3 filter=\"(member=cn=user,ou=people,dc=example,dc=org)\"\n...\n",[50,13836,13837,13841,13846,13851,13856,13861],{"__ignoreMap":48},[53,13838,13839],{"class":55,"line":56},[53,13840,6348],{},[53,13842,13843],{"class":55,"line":86},[53,13844,13845],{},"588241ee conn=1013 fd=16 TLS established tls_ssf=128 ssf=128\n",[53,13847,13848],{"class":55,"line":126},[53,13849,13850],{},"588241ee conn=1013 op=1 BIND \u003Cstrong>dn=\"\"\u003C/strong> method=128\n",[53,13852,13853],{"class":55,"line":163},[53,13854,13855],{},"588241ee conn=1013 op=1 RESULT tag=97 err=0 text=\n",[53,13857,13858],{"class":55,"line":186},[53,13859,13860],{},"588241ee conn=1013 op=2 SRCH base=\"ou=Groups,dc=example,dc=org\" scope=1 deref=3 
filter=\"(member=cn=user,ou=people,dc=example,dc=org)\"\n",[53,13862,13863],{"class":55,"line":221},[53,13864,6348],{},[18,13866,13867,13868,13871,13872],{},"you can see the empty ",[50,13869,13870],{},"dn=\"\""," which means the user will be logged in without any authentication. If you want to take a\ndeeper look you can check out the ",[585,13873,13875],{"href":13734,"rel":13874},[589],"sample project",[11259,13877,13878],{},[18,13879,13880],{},[573,13881,13882],{},"Let’s contribute to Spring LDAP",[18,13884,13885,13886,13890],{},"We opened a ",[585,13887,10119],{"href":13888,"rel":13889},"https://github.com/spring-projects/spring-ldap/pull/432",[589]," with a recommendation how to fix\nthis issue and as you can see below the last bind will now contain the user credentials",[43,13892,13894],{"className":13786,"code":13893,"language":13788,"meta":48,"style":48},"...\n58824373 conn=1015 fd=16 TLS established tls_ssf=128 ssf=128\n58824373 conn=1015 op=1 BIND \u003Cstrong>dn=\"cn=user,ou=People,dc=example,dc=org\"\u003C/strong> method=128\n58824373 conn=1015 op=1 RESULT tag=97 err=49 text=\n...\n",[50,13895,13896,13900,13905,13910,13915],{"__ignoreMap":48},[53,13897,13898],{"class":55,"line":56},[53,13899,6348],{},[53,13901,13902],{"class":55,"line":86},[53,13903,13904],{},"58824373 conn=1015 fd=16 TLS established tls_ssf=128 ssf=128\n",[53,13906,13907],{"class":55,"line":126},[53,13908,13909],{},"58824373 conn=1015 op=1 BIND \u003Cstrong>dn=\"cn=user,ou=People,dc=example,dc=org\"\u003C/strong> method=128\n",[53,13911,13912],{"class":55,"line":163},[53,13913,13914],{},"58824373 conn=1015 op=1 RESULT tag=97 err=49 text=\n",[53,13916,13917],{"class":55,"line":186},[53,13918,6348],{},[18,13920,13921],{},"and users with wrong credentials will not be allowed to log in.",[11259,13923,13924],{},[18,13925,13926],{},[573,13927,13928],{},"A long but satisfying journey",[18,13930,13931],{},"It took as quite some time to get near the bug and understand it, but in the end we learned something again",[577,13933,13934,13937,13940,13943],{},[580,13935,13936],{},"Have valuable integration tests",[580,13938,13939],{},"Do not give up and debug as deep as you can. You can learn a lot.",[580,13941,13942],{},"Open source projects are there to contribute",[580,13944,13945,13946,13951],{},"It is fun to chase some ",[585,13947,13950],{"href":13948,"rel":13949},"https://en.wikipedia.org/wiki/Starship_Troopers_(film)",[589],"bugs"," 🙂",[18,13953,13954,13955,13960],{},"I hope you enjoyed the blog and it will make you curious what the ",[585,13956,13959],{"href":13957,"rel":13958},"https://github.com/open-coffee",[589],"OpenCoffee"," will be.",[607,13962,989],{},{"title":48,"searchDepth":86,"depth":86,"links":13964},[],[6869,613,996],"2017-02-17T10:53:19","Some time ago we started to create our own intranet called OpenCoffee, which is\\na microservice landscape based on our own Spring Boot Starters, a service\\ndiscovery and an OAuth2 service. The vision is to create a system with a very easy integration\\nof new applications by just adding a few Spring Boot Starters and starting to code the specific functionality of the new\\nservice.","https://synyx.de/blog/welcome-to-spring-ldap-with-ssl-the-entrance-is-free/",{},"/blog/welcome-to-spring-ldap-with-ssl-the-entrance-is-free",{"title":13650,"description":13972},"Some time ago we started to create our own intranet called OpenCoffee, which is\na microservice landscape based on our own Spring Boot Starters, a service\ndiscovery and an OAuth2 service. 
The vision is to create a system with a very easy integration\nof new applications by just adding a few Spring Boot Starters and starting to code the specific functionality of the new\nservice.","blog/welcome-to-spring-ldap-with-ssl-the-entrance-is-free",[13975,1010,13976,13977],"oauth2","spring-boot","spring-ldap","Some time ago we started to create our own intranet called OpenCoffee, which is a microservice landscape based on our own Spring Boot Starters, a service discovery and an OAuth2…","t80MrMboDJkPsgFP9FgZjHfjRfpqmJUiyUzT4la3Q2s",{"id":13981,"title":13982,"author":13983,"body":13984,"category":14027,"date":14028,"description":14029,"extension":617,"link":14030,"meta":14031,"navigation":499,"path":14032,"seo":14033,"slug":14034,"stem":14035,"tags":14036,"teaser":14040,"__hash__":14041},"blog/blog/kommunikationsgrenzen-ueberschreiten-durch-visualisierung.md","Kommunikationsgrenzen überschreiten durch Visualisierung",[13652],{"type":11,"value":13985,"toc":14025},[13986,13989,13992,13995,14000,14003,14014,14017,14022],[14,13987,13982],{"id":13988},"kommunikationsgrenzen-überschreiten-durch-visualisierung",[18,13990,13991],{},"Als IT-Dienstleister setzt man bekanntlicherweise die Ideen und Wünsche des Kunden um. Deshalb befasst man sich sehr\nfrüh im Projekt mit dessen Fachlichkeit, um ein möglichst tiefes Verständnis über das Produkt und dessen Kontext zu\nerhalten.",[18,13993,13994],{},"Hier bot es sich in unserem neuen Projekt an eine neue Richtung einzuschlagen, um schneller ein einheitliches\nVerständnis zu erlangen. Das neue Projekt beschäftigt sich mit der Unterstützung der Arbeitsabläufe an einem\nContainerterminal. Ein Containerterminal ist ein Umschlagspunkt für Container, welche zum Beispiel für den weiteren\nTransport von einem Binnenschiff auf ein LKW umgeschlagen werden. Bei diesem Projekt bot es sich an das\nContainerterminal als Modell mit den wichtigsten Elementen zu modellieren. Also wurden Kräne, Stacker und LKWs\nkurzerhand nach gebaut. Nein nicht digital. Analog.",[18,13996,13997],{},[2223,13998],{"alt":48,"src":13999},"https://media.synyx.de/uploads//2017/01/IMG_6110.jpg",[18,14001,14002],{},"Mit unserem Modell sind wir nun in der Lage den kompletten Durchlauf eines Container an einem Containerterminal von der\nEinfahrtkontrolle über die Abfertigung mit einem Kran oder Stacker bis zur Ausfahrtkontrolle nach zu stellen.",[18,14004,14005,8713,14008,8713,14011],{},[2223,14006],{"alt":48,"src":14007},"https://media.synyx.de/uploads//2017/01/IMG_6103.jpg",[2223,14009],{"alt":48,"src":14010},"https://media.synyx.de/uploads//2017/01/IMG_6093.jpg",[2223,14012],{"alt":48,"src":14013},"https://media.synyx.de/uploads//2017/01/IMG_6100.jpg",[18,14015,14016],{},"Die Kommunikation zwischen dem Entwicklerteam und dem Kunden wurde dadurch deutlich vereinfacht. Gerade in Plannings und\nReviews, aber auch bei offenen Fragen beim Bearbeiten eines Tickets während des Sprints, griffen wir immer wieder auf\ndas Modell zurück, um Abläufe zu verstehen und einzelne Details nach zu stellen und diese zu analysieren.",[18,14018,14019],{},[2223,14020],{"alt":48,"src":14021},"https://media.synyx.de/uploads//2017/01/IMG_6073.jpg",[18,14023,14024],{},"Mit unseren bisherigen Erfahrungen können wir das Vorgehen mit dem Modell nur empfehlen. Es übertrifft Scribbles durch\nseine Dynamik und Interaktionsmöglichkeiten und holt durch den spielerischen Aspekt sehr einfach die Kollegen und den\nKunden ab. 
Für unsere nächsten Projekte würden wir solch ein Modell sofort wieder einsetzen.",{"title":48,"searchDepth":86,"depth":86,"links":14026},[],[613],"2017-02-01T13:09:53","Als IT-Dienstleister setzt man bekanntlicherweise die Ideen und Wünsche des Kunden um. Deshalb befasst man sich sehr\\nfrüh im Projekt mit dessen Fachlichkeit, um ein möglichst tiefes Verständnis über das Produkt und dessen Kontext zu\\nerhalten.","https://synyx.de/blog/kommunikationsgrenzen-ueberschreiten-durch-visualisierung/",{},"/blog/kommunikationsgrenzen-ueberschreiten-durch-visualisierung",{"title":13982,"description":13991},"kommunikationsgrenzen-ueberschreiten-durch-visualisierung","blog/kommunikationsgrenzen-ueberschreiten-durch-visualisierung",[14037,14038,14039],"kommunikation","lego","scrum","Als IT-Dienstleister setzt man bekanntlicherweise die Ideen und Wünsche des Kunden um. Deshalb befasst man sich sehr früh im Projekt mit dessen Fachlichkeit, um ein möglichst tiefes Verständnis über das…","gFh4am_7Ll9-fh-N5qyc2iAk6dzBY5KUFQctF0PEw08",{"id":14043,"title":14044,"author":14045,"body":14046,"category":14151,"date":14152,"description":14153,"extension":617,"link":14154,"meta":14155,"navigation":499,"path":14156,"seo":14157,"slug":14050,"stem":14158,"tags":14159,"teaser":14161,"__hash__":14162},"blog/blog/jdk-jongleur.md","JDK Jongleur",[8872],{"type":11,"value":14047,"toc":14149},[14048,14051,14054,14057,14069,14072,14075,14085,14088,14091,14101,14104,14114,14117,14127,14130,14133,14144,14147],[14,14049,14044],{"id":14050},"jdk-jongleur",[18,14052,14053],{},"Als Java Entwickler im Dienstleistungs-Umfeld hat man meist mit vielen Projekten und damit auch mit mehreren\nJDK-Versionen zu tun. Und selbst wenn man nur an einem Projekt mit fixer JDK-Version arbeitet, steht immer ein neues\nJDK vor der Tür, dessen Features man kennenlernen will oder dessen Kompatibilität mit dem aktuellen Projekt untersucht\nwerden soll.",[18,14055,14056],{},"Die unterschiedlichen Betriebssysteme haben alle mehr oder weniger gute Mechanismen, um systemweit das verwendete JDK zu\nändern. Bei einer globalen Änderung der JDK-Version kann es zumindest bei Major-Versionssprüngen unschöne Nebeneffekte\ngeben. Daher ist es meistens sinnvoller, das gewünschte JDK aus dem Internet herunterzuladen und dann lokal für eine\nShell eines Benutzers durch das Setzen von PATH und JAVA_HOME verfügbar machen.",[18,14058,14059,14060,1067,14065,14068],{},"Diese manuellen Schritte nimmt einem ",[585,14061,14064],{"href":14062,"rel":14063},"https://github.com/shyiko/jabba",[589],"Jabba",[585,14066,14062],{"href":14062,"rel":14067},[589],") ab. 
Jabba\nist ein in Go geschriebenes Tool, das zum einen Download und lokale Installation verschiedenster JDK-Versionen erledigt\nund zum anderen das lokale Wechseln dieser Versionen sehr einfach macht.",[18,14070,14071],{},"Nach der Installation des Tools kann man sich per ‘jabba ls-remote’",[18,14073,14074],{},"alle für Jabba gepflegten JDKs auflisten lassen:",[43,14076,14078],{"className":13786,"code":14077,"language":13788,"meta":48,"style":48},"jabba ls-remote\n\n",[50,14079,14080],{"__ignoreMap":48},[53,14081,14082],{"class":55,"line":56},[53,14083,14084],{},"jabba ls-remote\n",[18,14086,14087],{},"Neue JDKs können auf dem Github Repo von Jabba als PullRequest hinzugefügt werden (falls es die Maintainer nicht schon\ngemacht haben).",[18,14089,14090],{},"JDK-Versionen, die lokal noch nicht installiert wurden, können mit ‘jabba install’ installiert werden.",[43,14092,14094],{"className":13786,"code":14093,"language":13788,"meta":48,"style":48},"jabba install 1.8.112\n\n",[50,14095,14096],{"__ignoreMap":48},[53,14097,14098],{"class":55,"line":56},[53,14099,14100],{},"jabba install 1.8.112\n",[18,14102,14103],{},"Falls es schon systemweit installierte JDKs gibt, können diese mit ‘jabba link’ verlinkt und damit zur lokalen\nVerwendung in Jabba bereitgestellt werden.",[43,14105,14107],{"className":13786,"code":14106,"language":13788,"meta":48,"style":48},"jabba link system@1.8.112 /Library/Java/JavaVirtualMachines/jdk1.8.0_112.jdk\n\n",[50,14108,14109],{"__ignoreMap":48},[53,14110,14111],{"class":55,"line":56},[53,14112,14113],{},"jabba link system@1.8.112 /Library/Java/JavaVirtualMachines/jdk1.8.0_112.jdk\n",[18,14115,14116],{},"Will man in seiner Shell ein bestimmtes JDK nutzen, kann dieses einfach per ‘jabba use’ ausgewählt werden:",[43,14118,14120],{"className":13786,"code":14119,"language":13788,"meta":48,"style":48},"jabba use 1.8.112\n\n",[50,14121,14122],{"__ignoreMap":48},[53,14123,14124],{"class":55,"line":56},[53,14125,14126],{},"jabba use 1.8.112\n",[18,14128,14129],{},"Alles in allem ist Jabba ein kleines, handliches Tool, das genau das tut, was es soll und mir meinen Arbeitsalltag mit\nunterschiedlichen und zum Teil auch early-access Versionen von JDK 9 erheblich einfacher macht.",[18,14131,14132],{},"Wie grenzt sich Jabba nun von ähnlichen Tools (oder Skript-Lösungen) ab?",[577,14134,14135,14138,14141],{},[580,14136,14137],{},"Es bietet eine konsistente Nutzung auf die Linux, Windows und Mac.",[580,14139,14140],{},"Neben dem Setzen der JDK-Version kümmert sich Jabba auch um Download und lokale Installation (aka. Unzip).",[580,14142,14143],{},"Installation und JDK-Wahl haben nur Nutzer- bzw. Shell-lokale Effekte.",[18,14145,14146],{},"An dieser Stelle geht ein herzlicher Dank an Sven Ruppert (@SvenRuppert), der mich auf dieses praktische Tool aufmerksam\ngemacht hat.",[607,14148,989],{},{"title":48,"searchDepth":86,"depth":86,"links":14150},[],[613],"2017-01-16T10:10:07","Als Java Entwickler im Dienstleistungs-Umfeld hat man meist mit vielen Projekten und damit auch mit mehreren\\nJDK-Versionen zu tun. 
Und selbst wenn man nur an einem Projekt mit fixer JDK-Version arbeitet, steht immer ein neues\\nJDK vor der Tür, dessen Features man kennenlernen will oder dessen Kompatibilität mit dem aktuellen Projekt untersucht\\nwerden soll.","https://synyx.de/blog/jdk-jongleur/",{},"/blog/jdk-jongleur",{"title":14044,"description":14053},"blog/jdk-jongleur",[14160],"jdk","Als Java Entwickler im Dienstleistungs-Umfeld hat man meist mit vielen Projekten und damit auch mit mehreren JDK-Versionen zu tun. Und selbst wenn man nur an einem Projekt mit fixer JDK-Version arbeitet,…","CGBoSzrYGMvHm9e95aYCx2ggm7EDm8hKSNFlLS7z8gk",{"id":14164,"title":14165,"author":14166,"body":14167,"category":14565,"date":14566,"description":14174,"extension":617,"link":14567,"meta":14568,"navigation":499,"path":14569,"seo":14570,"slug":14171,"stem":14571,"tags":14572,"teaser":14575,"__hash__":14576},"blog/blog/bean-x-of-type-y-is-not-eligible-for-getting-processed-by-all-beanpostprocessors.md","Bean X of type Y is not eligible for getting processed by all BeanPostProcessors",[12981],{"type":11,"value":14168,"toc":14557},[14169,14172,14175,14179,14182,14185,14227,14247,14252,14261,14267,14281,14285,14292,14316,14323,14327,14330,14342,14346,14349,14360,14367,14370,14376,14389,14417,14420,14424,14427,14455,14458,14509,14530,14539,14542,14546,14549,14552,14555],[14,14170,14165],{"id":14171},"bean-x-of-type-y-is-not-eligible-for-getting-processed-by-all-beanpostprocessors",[18,14173,14174],{},"Recently we had a problem related Springs auto-proxy feature that I think is worth writing about.",[2207,14176,14178],{"id":14177},"the-problem","The Problem",[18,14180,14181],{},"We use Spring as our framework of choice because it provides us with a nice set of convenience features when\nbootstrapping and plugging together our application.",[18,14183,14184],{},"One of these features is caching: We cache our users’ roles because their definitions are stored in a pretty slow\nexternal system and change rarely.",[43,14186,14188],{"className":288,"code":14187,"language":290,"meta":48,"style":48},"\n@Component\npublic class RoleRepository {\n @Cacheable(CacheConfig.ROLES_NAME)\n public Set\u003CRole> loadRoles() {\n // .. slow call to external system\n }\n}\n\n",[50,14189,14190,14194,14199,14204,14209,14214,14219,14223],{"__ignoreMap":48},[53,14191,14192],{"class":55,"line":56},[53,14193,500],{"emptyLinePlaceholder":499},[53,14195,14196],{"class":55,"line":86},[53,14197,14198],{},"@Component\n",[53,14200,14201],{"class":55,"line":126},[53,14202,14203],{},"public class RoleRepository {\n",[53,14205,14206],{"class":55,"line":163},[53,14207,14208],{}," @Cacheable(CacheConfig.ROLES_NAME)\n",[53,14210,14211],{"class":55,"line":186},[53,14212,14213],{}," public Set\u003CRole> loadRoles() {\n",[53,14215,14216],{"class":55,"line":221},[53,14217,14218],{}," // .. slow call to external system\n",[53,14220,14221],{"class":55,"line":242},[53,14222,7384],{},[53,14224,14225],{"class":55,"line":273},[53,14226,282],{},[18,14228,14229,14230,14233,14234,14237,14238,14243,14244,14246],{},"Since our ",[50,14231,14232],{},"RoleRepository"," is a component managed by Spring it gets picked up automatically during boot and\n",[50,14235,14236],{},"loadRoles()"," gets backed by a cache. Spring implements this by proxying our repository.\nSee ",[585,14239,14242],{"href":14240,"rel":14241},"http://docs.spring.io/spring/docs/current/spring-framework-reference/html/cache.html",[589],"Spring Reference Documentation","\nfor details. 
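The CacheConfig class referenced by the repository is not shown in this post; as a rough idea of what it could look like, here is a minimal sketch. The ConcurrentMapCacheManager is only the simplest possible backing store and an assumption for this example, a real setup would plug in a cache implementation with a time-based expiry:

```java
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
@EnableCaching
public class CacheConfig {

    // Cache name used by @Cacheable(CacheConfig.ROLES_NAME) above.
    public static final String ROLES_NAME = "roles";

    @Bean
    public CacheManager cacheManager() {
        // Simplest in-memory cache manager; swap in Ehcache, Guava or Caffeine
        // for entries that should expire after a while.
        return new ConcurrentMapCacheManager(ROLES_NAME);
    }
}
```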
Since our `RoleRepository` is a component managed by Spring, it gets picked up automatically during boot and `loadRoles()` gets backed by a cache. Spring implements this by proxying our repository; see the [Spring Reference Documentation](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/cache.html) for details. As a result, the "real" `loadRoles()` method gets triggered only once in 10 minutes and all the other calls are served from the cache.

After several sprints we noticed a problem with the caching. While caching worked for other beans, it stopped working for the `RoleRepository` shown above.

We noticed this because our health check, which (indirectly) triggers `loadRoles()` and runs every 5 seconds, did not hit the cache and therefore produced a log entry every 5 seconds. The cache for roles stayed empty, regardless of how often `loadRoles()` was called.

While debugging the issue we found out that the proxy that should do the caching was not generated for `RoleRepository`. During bootstrap of the application, Spring gave us a corresponding hint:

```
INFO 7189 --- [           main] trationDelegate$BeanPostProcessorChecker : Bean 'RoleRepository' of type [class ...RoleRepository] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
```

## What is a BeanPostProcessor

A `BeanPostProcessor` is a special component that allows other beans to be manipulated after they have been created.

These post-processors can manipulate beans or even replace them completely. Spring ships with several implementations that do all kinds of things. For example, there is one that checks whether a bean implements `ApplicationContextAware` and sets the `ApplicationContext` if so. Much of the "proxy stuff" behind `@Async`, `@Transactional`, `@Caching` and so on is also done using a `BeanPostProcessor`. You can implement your own post-processors as well.

To be able to post-process all beans with all `BeanPostProcessors`, Spring has to create the post-processors before it creates the "regular" beans.
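As an illustration (not taken from the post), a minimal custom post-processor could look like the sketch below. It only logs every bean it sees, but it could just as well return a proxy instead of the original bean, which is exactly what the caching and transaction infrastructure does:

```java
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.stereotype.Component;

@Component
public class LoggingBeanPostProcessor implements BeanPostProcessor {

    @Override
    public Object postProcessBeforeInitialization(Object bean, String beanName) {
        return bean; // nothing to do before initialization
    }

    @Override
    public Object postProcessAfterInitialization(Object bean, String beanName) {
        // Spring's infrastructure post-processors hook in here and may return a proxy
        System.out.println("post-processing bean: " + beanName);
        return bean;
    }
}
```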
## The Chicken and the Egg

You can also inject Spring beans into post-processors. If you inject a bean into a post-processor, Spring has to create this bean even before it creates the post-processor you inject it into. In that case Spring no longer guarantees that all post-processors are able to process the injected bean; instead it logs the message shown above.

In our example the log message means that there is one post-processor that (directly or indirectly) leads to the creation of our `RoleRepository`, and that there are more post-processors to be created later (probably the one that handles `@Cacheable`) which will not be able to post-process our `RoleRepository`.

## Debugging the Problem

Unfortunately the log entry does not help very much in finding out what exactly the problem is.

The statement is produced by `BeanPostProcessorChecker`, an inner class of `PostProcessorRegistrationDelegate`. To find out what exactly happens here, I set a breakpoint right at the log statement and booted our application in debug mode. Since several beans are created, I waited for the statement in question (in our case the one with beanName = roleRepository).

To find out what caused the creation of our roleRepository, I simply followed the call stack down the `getObject` methods. In our case the chain was:

roleRepository -> authorityService -> customerPermissionEvaluator -> delegatingPermissionEvaluator -> customMethodSecurityConfiguration -> methodSecurityMetadataSource -> (… more Spring setup code …)

![debugging_getobject](https://media.synyx.de/uploads//2016/11/debugging_getObject.png)

So the `RoleRepository` is created because it is needed by our custom implementation of Spring Security's `PermissionEvaluator` interface, which is used to evaluate security-related expressions like the ones that can be used with `@PreAuthorize`:

```java
@PreAuthorize("hasPermission(#customer, AUTHORITY_BILLING)")
public Optional<BillingDoc> findById(Customer customer, String documentId) {
    // boring business logic here
}
```

So when Spring Security is bootstrapped (which seems to happen before caching is set up), it also initializes a part of our "business beans", which then cannot be post-processed for caching.

## The Fix

To fix the problem I cut the eager dependency from our security code and replaced it with a lazy one.

```java
@Autowired
public CustomerPermissionEvaluator(AuthorityService authorityService) {
    this.authorityService = authorityService;
}
```
was changed to

```java
@Autowired
public CustomerPermissionEvaluator(ObjectFactory<AuthorityService> authorityServiceObjectFactory) {
    this.authorityServiceObjectFactory = authorityServiceObjectFactory;
}

@Override
public boolean hasPermission(Authentication authentication, Object targetDomainObject, Object permission) {
    AuthorityService service = this.authorityServiceObjectFactory.getObject();
    // ... do stuff with service
}
```

By using Spring's `ObjectFactory` and calling its `getObject()` later (not during construction of the `CustomerPermissionEvaluator`), we delay the creation of `AuthorityService` (and of the beans it needs, such as the `RoleRepository`).

This way they can later be processed by all `BeanPostProcessors`, the log message in question disappears, and caching works again o/

Note that there are other ways to solve the problem. One other thing I can think of is to change the order in which the post-processors are created.
## Final Thoughts

In conclusion, you might want to actively watch the statements Spring logs at INFO level and above during your application's bootstrap.

In particular, the mentioned statement about beans not being eligible for auto-proxying should not list any of your non-infrastructure beans.

Also make sure there is no eager dependency between your infrastructure-related code and your business logic, or at least double-check and challenge such decisions. This is especially important because dependency graphs get pretty big pretty fast: in our example there were 20 other "business" beans not eligible for auto-proxying because of the same eager dependency.

*(Original post: https://synyx.de/blog/bean-x-of-type-y-is-not-eligible-for-getting-processed-by-all-beanpostprocessors/, published 2016-11-04.)*

# Automating JavaScript Code Refactoring

A while ago I had the opportunity to refactor an older JavaScript project. Among other things, the assertion library Jasmine was to be upgraded from 1.x to 2.x. There were two things to refactor in our tests: the way asynchronous specs are written, and the expectations used. The changes required when moving from Jasmine 1.x to 2.x are described very well at http://jasmine.github.io/2.0/upgrading.html.
In this article I want to show how I automated the transformation of the `runs` and `waitsFor` blocks to the new `done` callback pattern using [jscodeshift](https://github.com/facebook/jscodeshift).

![jasmine_async_vergleich](https://media.synyx.de/uploads//2016/08/jasmine_async_vergleich-1024x469.png)

## jscodeshift / recast / esprima / codemods

Jscodeshift is a tool built by Facebook that extends [recast](https://github.com/benjamn/recast), which in turn works with the Esprima parser. The parser builds an abstract syntax tree (AST) that can be traversed.

With jscodeshift it is possible, for example, to pick out all anonymous functions and give them a name such as *ichBinNichtMehrAnonym*. Another nice feature, in my opinion, is that the original code formatting is preserved (as far as possible).

So you build a small piece of code that rewrites other code. Such a piece of code is called a codemod. A quick Google search brings up two interesting articles, which are not strictly required for what follows 🙂

- [How to write a codemod](https://vramana.github.io/blog/2015/12/21/codemod-tutorial/)
- [Effective JavaScript Codemods](https://medium.com/@cpojer/effective-javascript-codemods-5a6686bb46fb)

## Automating code refactorings

Why automate at all, you may ask. There can be several reasons. For one, you might not have a trainee available to rewrite the tests, and for another… no… please replace trainees with jscodeshift. For the refactoring work, that is… But back to the reasons for automation.

**Fun**

I had to think for a moment about when I last did a refactoring in a JavaScript project that was actually fun. I could not come up with one. The work ~~was~~ is dull: search and replace. With automated code refactoring, you write a small piece of code that does the searching and replacing for you. I get to hack code!

**Reliability**

A general argument for automation applies to code refactoring as well, I think. The same thing is done the same way, every time, in every place. There is no finger slipping onto the wrong key.
There is no carelessness that leads to a spot being forgotten. The machine reliably does what it is supposed to do. Always. Any time. Everywhere.

**Effectiveness**

Once something is automated, all that is left is to push a button. For code refactoring this means you can admire the result within (milli)seconds. At this point I am not yet talking specifically about codemods and jscodeshift as a tool; a regex can be perfectly sufficient.

A regex for

- deleting sections when condition A applies
- moving code blocks around

can either be written once and never understood again, or is outright impossible to write. This is where jscodeshift and codemods come to the rescue.

## Codemods for upgrading from Jasmine 1.x to 2.x

A codemod is a piece of code that transforms existing source code. In our case of migrating Jasmine from version 1.x to 2.x, the following need to be transformed:

- spies
- asynchronous tests
- expectations
- custom matchers (if present)
- the clock

In the following we focus on the asynchronous tests.

### Transforming asynchronous tests

The project to be refactored mostly had very simply written asynchronous tests — perfect for getting started with jscodeshift.

A variable is initially `false` and is set to `true` after the action under test.
The assertions are then only executed once the variable has been set:

```js
it("tests something async", function () {
  var done;
  doSomethingAsync(function callback() {
    // assertions
    done = true;
  });
  waitsFor(function () {
    return done;
  });
});
```

For Jasmine 2.x we therefore have to

- add a `done` parameter to the function passed to `it`
- replace `done = true;` with `done();`
- and delete the `waitsFor` block

After the refactoring, the whole thing should look like this:

```js
it("tests something async", function (done) {
  doSomethingAsync(function callback() {
    // assertions
    done();
  });
});
```
done",[53,14922,7335],{"class":82},[53,14924,14925],{"class":55,"line":186},[53,14926,14818],{"class":82},[53,14928,14929],{"class":55,"line":221},[53,14930,7422],{"class":82},[1217,14932,14612],{"id":14612},[18,14934,14935],{},"Bevor wir loslegen können, müssen noch wenige Dinge erledigt werden.",[43,14937,14939],{"className":13786,"code":14938,"language":13788,"meta":48,"style":48},"\n$> npm install -g jscodeshift\n$> mkdir jasmineCodemods && cd jasmineCodemods\n$> git init && git commit -m \"initial commit\" --allow-empty\n$> touch jasmine-async.js\n\n",[50,14940,14941,14945,14950,14955,14960],{"__ignoreMap":48},[53,14942,14943],{"class":55,"line":56},[53,14944,500],{"emptyLinePlaceholder":499},[53,14946,14947],{"class":55,"line":86},[53,14948,14949],{},"$> npm install -g jscodeshift\n",[53,14951,14952],{"class":55,"line":126},[53,14953,14954],{},"$> mkdir jasmineCodemods && cd jasmineCodemods\n",[53,14956,14957],{"class":55,"line":163},[53,14958,14959],{},"$> git init && git commit -m \"initial commit\" --allow-empty\n",[53,14961,14962],{"class":55,"line":186},[53,14963,14964],{},"$> touch jasmine-async.js\n",[18,14966,14967],{},"Der Einfachkeit halber installieren wir jscodeshift global um das binary auf der Konsole ausführen zu können. Und das\nGit Repo zum einfachen hacken, sichern und zurückrollen darf auch nicht fehlen!",[18,14969,14970],{},"Dann legen wir uns eine Datei für der/die/das erste codemod an und schreiben folgenden Inhalt:",[43,14972,14974],{"className":14754,"code":14973,"language":14756,"meta":48,"style":48},"// jasmine-async.js\nmodule.exports = function transformer(file, api) {\n const j = api.jscodeshift;\n const { statement } = j.template;\n const root = j(file.source);\n return root;\n};\n",[50,14975,14976,14981,15011,15024,15042,15056,15063],{"__ignoreMap":48},[53,14977,14978],{"class":55,"line":56},[53,14979,14980],{"class":3698},"// jasmine-async.js\n",[53,14982,14983,14986,14988,14991,14993,14996,14999,15001,15004,15006,15009],{"class":55,"line":86},[53,14984,14985],{"class":89},"module",[53,14987,986],{"class":82},[53,14989,14990],{"class":89},"exports",[53,14992,1245],{"class":389},[53,14994,14995],{"class":389}," function",[53,14997,14998],{"class":59}," transformer",[53,15000,1067],{"class":82},[53,15002,15003],{"class":6186},"file",[53,15005,99],{"class":82},[53,15007,15008],{"class":6186},"api",[53,15010,11325],{"class":82},[53,15012,15013,15016,15019,15021],{"class":55,"line":126},[53,15014,15015],{"class":389}," const",[53,15017,15018],{"class":89}," j",[53,15020,1245],{"class":389},[53,15022,15023],{"class":82}," api.jscodeshift;\n",[53,15025,15026,15028,15031,15034,15037,15039],{"class":55,"line":163},[53,15027,15015],{"class":389},[53,15029,15030],{"class":82}," { ",[53,15032,15033],{"class":89},"statement",[53,15035,15036],{"class":82}," } ",[53,15038,390],{"class":389},[53,15040,15041],{"class":82}," j.template;\n",[53,15043,15044,15046,15049,15051,15053],{"class":55,"line":186},[53,15045,15015],{"class":389},[53,15047,15048],{"class":89}," root",[53,15050,1245],{"class":389},[53,15052,15018],{"class":59},[53,15054,15055],{"class":82},"(file.source);\n",[53,15057,15058,15060],{"class":55,"line":221},[53,15059,11818],{"class":389},[53,15061,15062],{"class":82}," root;\n",[53,15064,15065],{"class":55,"line":242},[53,15066,15067],{"class":82},"};\n",[18,15069,15070,15071,15074,15075,99,15078,99,15081,99,15084,15087,15088,15091,15092,8713,15097,15102,15103,15108],{},"Auf ",[50,15072,15073],{},"root"," können wir jetzt jscodeshift Methoden aufrufen 
wie ",[50,15076,15077],{},"find",[50,15079,15080],{},"filter",[50,15082,15083],{},"forEach",[50,15085,15086],{},"replaceWith"," und zuletzt\n",[50,15089,15090],{},"toSource",". Die Methoden machen genau das was der Name sagt, selbsterklärend. Genaueres muss man sich leider selbst\nim ",[585,15093,15096],{"href":15094,"rel":15095},"https://github.com/facebook/jscodeshift/blob/fe67b121d4c2519c5227a00be3f590e7f7c46d2b/src/Collection.js",[589],"Source",[585,15098,15101],{"href":15099,"rel":15100},"https://github.com/facebook/jscodeshift/tree/fe67b121d4c2519c5227a00be3f590e7f7c46d2b/src/collections",[589],"Code","\nauf ",[585,15104,15107],{"href":15105,"rel":15106},"https://github.com/facebook/jscodeshift/tree/fe67b121d4c2519c5227a00be3f590e7f7c46d2b",[589],"Github"," zusammenkratzen.",[18,15110,15111],{},"Ausführen können wir das Skript später mit",[43,15113,15115],{"className":13786,"code":15114,"language":13788,"meta":48,"style":48},"\n$> jscodeshift -t ./jasmine-async.js pfad/zur/source/datei\n\n",[50,15116,15117,15121],{"__ignoreMap":48},[53,15118,15119],{"class":55,"line":56},[53,15120,500],{"emptyLinePlaceholder":499},[53,15122,15123],{"class":55,"line":86},[53,15124,15125],{},"$> jscodeshift -t ./jasmine-async.js pfad/zur/source/datei\n",[18,15127,15128],{},"Doch zuerst müssen Transformationen gecoded werden 😮",[18,15130,15131,15132,15134,15135,15137,15138,15141,15142,15144,15145,15147,15148,15153],{},"Wir wollen fürs erste alle ",[50,15133,14763],{}," Knoten finden und der übergebenen Funktion einen ",[50,15136,14606],{}," Parameter spendieren. Zum Suchen\nvon Ausdrücken verwenden wir die jscodeshift Methode ",[50,15139,15140],{},"root.find",". Diese traversiert den AST und gibt uns eine Collection\nvon passenden Knoten zurück. Als Argument müssen wird dem ",[50,15143,15077],{}," Aufruf eine AST Beschreibung des ",[50,15146,14763],{}," Knotens mitgeben.\nBeim Finden der Beschreibung hilft uns der geniale ",[585,15149,15152],{"href":15150,"rel":15151},"https://astexplorer.net",[589],"astexplorer.net",". Wir kopieren den Code den\nwir transformieren wollen in den Editor und bekommen den AST ausgespuckt. Wir können sogar auf jeden beliebigen Knoten\nim Editor klicken und bekommen im AST den enstsprechenden Teil markiert!",[18,15155,15156],{},[2223,15157],{"alt":15158,"src":15159},"astexplorer","https://media.synyx.de/uploads//2016/08/astexplorer-1024x631.png",[43,15161,15163],{"className":14754,"code":15162,"language":14756,"meta":48,"style":48},"// jasmine-async.js\nreturn root.find(j.CallExpression, {\n callee: {\n name: \"it\",\n },\n});\n",[50,15164,15165,15169,15182,15187,15197,15201],{"__ignoreMap":48},[53,15166,15167],{"class":55,"line":56},[53,15168,14980],{"class":3698},[53,15170,15171,15174,15177,15179],{"class":55,"line":86},[53,15172,15173],{"class":389},"return",[53,15175,15176],{"class":82}," root.",[53,15178,15077],{"class":59},[53,15180,15181],{"class":82},"(j.CallExpression, {\n",[53,15183,15184],{"class":55,"line":126},[53,15185,15186],{"class":82}," callee: {\n",[53,15188,15189,15192,15195],{"class":55,"line":163},[53,15190,15191],{"class":82}," name: ",[53,15193,15194],{"class":63},"\"it\"",[53,15196,2252],{"class":82},[53,15198,15199],{"class":55,"line":186},[53,15200,2320],{"class":82},[53,15202,15203],{"class":55,"line":221},[53,15204,7422],{"class":82},[18,15206,15207,15208,15210,15211,15213,15214,15216],{},"Dann wollen wir für alle Knoten die gefunden werden etwas tun. Nämlich den ",[50,15209,14606],{}," Parameter hinzufügen zur eigentlichen\nTestfunktion. 
Mit ",[50,15212,15083],{}," können wir über die von ",[50,15215,15077],{}," zurückgegebene Collection iterieren und dies tun.",[43,15218,15220],{"className":14754,"code":15219,"language":14756,"meta":48,"style":48},"\n// jasmine-async.js\nreturn root\n .find(...)\n .forEach(p => {\n // p.node.arguments[0] would be the spec description\n const specCallee = p.node.arguments[1];\n // add 'done' parameter\n specCallee.params.push(statment`done`);\n })\n\n",[50,15221,15222,15226,15230,15237,15250,15265,15270,15288,15293,15311],{"__ignoreMap":48},[53,15223,15224],{"class":55,"line":56},[53,15225,500],{"emptyLinePlaceholder":499},[53,15227,15228],{"class":55,"line":86},[53,15229,14980],{"class":3698},[53,15231,15232,15234],{"class":55,"line":126},[53,15233,15173],{"class":389},[53,15235,15236],{"class":82}," root\n",[53,15238,15239,15242,15244,15246,15248],{"class":55,"line":163},[53,15240,15241],{"class":82}," .",[53,15243,15077],{"class":59},[53,15245,1067],{"class":82},[53,15247,12124],{"class":389},[53,15249,685],{"class":82},[53,15251,15252,15254,15256,15258,15260,15263],{"class":55,"line":186},[53,15253,15241],{"class":82},[53,15255,15083],{"class":59},[53,15257,1067],{"class":82},[53,15259,18],{"class":6186},[53,15261,15262],{"class":389}," =>",[53,15264,6176],{"class":82},[53,15266,15267],{"class":55,"line":221},[53,15268,15269],{"class":3698}," // p.node.arguments[0] would be the spec description\n",[53,15271,15272,15275,15278,15280,15283,15285],{"class":55,"line":242},[53,15273,15274],{"class":389}," const",[53,15276,15277],{"class":89}," specCallee",[53,15279,1245],{"class":389},[53,15281,15282],{"class":82}," p.node.arguments[",[53,15284,2546],{"class":89},[53,15286,15287],{"class":82},"];\n",[53,15289,15290],{"class":55,"line":273},[53,15291,15292],{"class":3698}," // add 'done' parameter\n",[53,15294,15295,15298,15301,15303,15306,15309],{"class":55,"line":279},[53,15296,15297],{"class":82}," specCallee.params.",[53,15299,15300],{"class":59},"push",[53,15302,1067],{"class":82},[53,15304,15305],{"class":59},"statment",[53,15307,15308],{"class":63},"`done`",[53,15310,1079],{"class":82},[53,15312,15313],{"class":55,"line":496},[53,15314,15315],{"class":82}," })\n",[18,15317,15318,15319,15321,15322,15324,15325,15328,15329,15331,15332,15334],{},"Die Variable ",[50,15320,18],{}," ist der Pfad des gefundenen Knotens. Man könnte die Variable auch ",[50,15323,4400],{}," benennen, würde sich aber\nbeißen mit dem node Modul ",[50,15326,15327],{},"const path = require('path');",". Das importieren dieses Moduls ist keine Seltenheit in\ncodemods denke ich. Und als Konvention nehmen wir einfach ",[50,15330,18],{}," statt ",[50,15333,4400],{},", immer!",[18,15336,15337,15338,15340,15341,15343,15344,15346],{},"Der ASTExplorer zeigt wie wir an die Funktion kommen der wir den ",[50,15339,14606],{}," Parameter hinzufügen möchten. Wir holen uns das\nzweite Element der CallExpression Argumente und fügen dessen Parameter Liste einfach das ",[50,15342,14606],{}," hinzu. Leider (?) können\nwir aber keinen String übergeben. Wir erinnern uns an den AST. Wir brauchen eine Beschreibung des Knotens. Man könnte\njetzt entweder ein komplexes Objekt erstellen, oder man nimmt sich einfach die nützliche ",[50,15345,15033],{}," Funktion zu Hilfe.\nAuf die Funktion machte mich ein Kollege aufmerksam. 
It is unfortunately not mentioned in the documentation, only in codemods on GitHub… Did I mention that the docs are a bit sparse?

Finally, we have to hand the changes back to jscodeshift with `toSource()` so that the file gets rewritten:

```js
// jasmine-async.js
return root
  .find(...)
  .forEach(...)
  .toSource()
```

For a quick test, the transformation can be run on the command line with

```bash
$> jscodeshift -t ./jasmine-async.js pfad/zur/source/datei.js
```

and, once the initial amazement has worn off, reset again with

```bash
$> git checkout HEAD -- pfad/zur/source/datei.js
```

Git o/

The first item is done.

- ~~add a `done` parameter to the function passed to `it`~~
- replace `done = true;` with `done();`
- and delete the `waitsFor` block
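As a side note — instead of the run-and-reset cycle via Git, jscodeshift can also be told not to touch any files: as far as I know, the CLI supports a dry run combined with printing the result to stdout, which is handy while iterating on a codemod:

```bash
$> jscodeshift -t ./jasmine-async.js --dry --print pfad/zur/source/datei.js
```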
Next, let's replace `done = true` with the `done()` call. To do that, we click on the corresponding expression in the ASTExplorer and look at the path description we need in the AST on the right.

```js
// jasmine-async.js
return root
  .find(...)
  .forEach(p => {
    // ...
    // replace 'done = true' with done() invocation
    j(p).find(j.ExpressionStatement, {
      expression: {
        type: j.AssignmentExpression.name,
        left: {
          name: 'done'
        }
      }
    }).replaceWith(p => statement`done();`);
  })
  .toSource()
```

Since we know that `done = true` occurs only once, we can tell the collection directly to replace it with the statement `done();`.

The `done` variable is now obviously obsolete and can be removed completely.
Again we look in the ASTExplorer for the description we need and end up with the following:

```js
// jasmine-async.js
return root
  .find(...)
  .forEach(p => {
    // ...
    // get rid of 'var done = false'
    j(p).find(j.VariableDeclaration, {
      declarations: [
        {
          type: j.VariableDeclarator.name,
          id: {
            name: 'done'
          }
        }
      ]
    }).remove()
  })
  .toSource()
```

The second item is done as well.

- ~~add a `done` parameter to the function passed to `it`~~
- ~~replace `done = true;` with `done();`~~
- and delete the `waitsFor` block

All that is left is removing the `waitsFor` block. You guessed it!
The ASTExplorer shows us what to look for.

```js
// jasmine-async.js
return root
  .find(...)
  .forEach(p => {
    // ...
    // get rid of obsolete waitsFor block
    j(p).find(j.CallExpression, {
      callee: {
        name: 'waitsFor'
      }
    }).remove()
  })
  .toSource()
```

Done!

Let's quickly check that it really works:

```bash
$> jscodeshift -t ./jasmine-async.js pfad/zur/source/datei.js
```

And off it goes into the repo:

```bash
$> git commit -am "jasmine async test upgrade from 1.x to 2.x; automated 🎉"
```

With this codemod we can now transform dozens of JavaScript files with a single command o/ — although I would still recommend a code review, and not pushing to master blindly.
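For reference, here is what the fragments look like when put together into a single transformer. This is a sketch assembled from the snippets above, not taken verbatim from the post; the one deviation is that the `done` parameter is added via the plain `j.identifier("done")` builder instead of the `statement` template:

```js
// jasmine-async.js -- the steps above, assembled into one transformer
module.exports = function transformer(file, api) {
  const j = api.jscodeshift;
  const { statement } = j.template;
  const root = j(file.source);

  root
    // find all it(...) calls
    .find(j.CallExpression, { callee: { name: "it" } })
    .forEach(p => {
      // the spec function is the second argument of it(description, fn)
      const specCallee = p.node.arguments[1];

      // add the 'done' parameter
      specCallee.params.push(j.identifier("done"));

      // replace 'done = true' with a done() invocation
      j(p)
        .find(j.ExpressionStatement, {
          expression: {
            type: j.AssignmentExpression.name,
            left: { name: "done" },
          },
        })
        .replaceWith(() => statement`done();`);

      // remove the now obsolete 'var done' declaration
      j(p)
        .find(j.VariableDeclaration, {
          declarations: [
            { type: j.VariableDeclarator.name, id: { name: "done" } },
          ],
        })
        .remove();

      // remove the obsolete waitsFor block
      j(p)
        .find(j.CallExpression, { callee: { name: "waitsFor" } })
        .remove();
    });

  return root.toSource();
};
```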
## Outlook

Admittedly, I picked a really simple example here, but it still covered many of our project's tests. I added a few guards to the codemod, e.g. abort if the number of `waitsFor` blocks is != 1. That skips all specs that are synchronous, as well as asynchronous tests that follow a different pattern — in our case, for example, integration tests that had several `waitsFor` blocks because several clicks were simulated.

The remaining Jasmine transformations for matchers and spies are still missing, but I think it should be fairly clear by now how codemods for those could be written as well.

When codemods get more complex, it is worth thinking about unit tests, and jscodeshift helps here too; [this spot](https://github.com/cpojer/js-codemod/blob/82af3089f22fa0687159f64177b73908b82d074f/transforms/__tests__/arrow-function-test.js) serves as an example. The short version: jest is required as the test runner. The source code to be transformed and the source code that should come out of the transformation are each placed as files in the `__testfixtures__` directory. The test itself lives in the `__tests__` directory and simply declares the test via the test utils. Nothing stands in the way of developing codemods test-driven anymore.

```js
// jasmine-async.spec.js
const defineTest = require("jscodeshift/dist/testUtils").defineTest;
defineTest(__dirname, "jasmine-async");
```
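For completeness: as far as I remember the `defineTest` convention, the transform, its fixtures and the test would be laid out roughly like this; the fixture files simply contain the before/after spec from the beginning of this section:

```
jasmineCodemods/
├── jasmine-async.js                # the transformer
├── __testfixtures__/
│   ├── jasmine-async.input.js      # Jasmine 1.x spec (waitsFor pattern)
│   └── jasmine-async.output.js     # expected Jasmine 2.x spec (done callback)
└── __tests__/
    └── jasmine-async.spec.js       # the defineTest call shown above
```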
## Conclusion

Despite the sparse documentation, you find your way around after a while, and the ASTExplorer in particular is a great help. For the first codemod I probably took about three times as long as replacing everything in the project by hand would have taken. But once you know the approach with jscodeshift, that quickly evens out again — and the codemod can be reused, even if with small adjustments for other circumstances.

So far I have mostly used the [js-codemods](https://github.com/cpojer/js-codemod) by [@cpojer](https://twitter.com/cpojer) to transform code to new ES2015 language features.

**My recommendation:**

Dig in and just do it — maybe even for smaller projects.

*(Original post: https://synyx.de/blog/javascript-code-refactoring-automatisieren/, published 2016-08-25.)*
# synyx GOTO Amsterdam

From 13 to 15 June 2016, seven of us were in Amsterdam for the [goto; Amsterdam](http://gotocon.com/amsterdam-2016/). First there is a short travel report to read, followed by a few impressions from the individual sessions and talks. If you want to jump straight to the technical focus of our trip, head [here](#goto).

## The Arrival

"I'm going to get myself a raw herring," Marc declared as we got off in Amsterdam. Nobody objected, but first the luggage had to be taken to the hostel.

Once there, the plan was made to visit the hot dog place "Fat Dog", which had already been praised on the cooking show "Kitchen Impossible".

After a march of at least ten hours on foot we actually got there, having worked up a proper appetite. Most of the group had dressed for the upcoming match Germany vs. Ukraine in more or less current Germany jerseys. This was noticed immediately, and we were pointed to the special of the day.

![IMG-20160612-WA0006](https://media.synyx.de/uploads//2016/06/IMG-20160612-WA0006-1.jpg)

In hindsight, we should have paid more attention to that offer, because the hot dogs we ordered were just about enough to restore the energy burned on the 15-hour march. "I'd still be up for a herring," Marc noted again.

But kick-off was approaching and we still had to find a suitable bar to watch the match. A few minutes' walk later we found one. In good spirits after the successful 2:0 we left the bar and, at Honnel's request, went looking for a croquette vending machine (yes, you read that right…). David took over navigation and led us to said machine. (…) Happy and content, we went to sleep and gathered energy for the next day.

zZz …herring…mumble mumble…zZzZ

## Amsterdam in a nutshell

We wanted to use Monday to get to know Amsterdam a little. Over breakfast together, we discussed how best to spend the day.
Two plans emerged:

- by bike to the sea
- off to the Rijksmuseum

Planning the bike tour turned out to be a bit tricky, since the weather forecasts of various apps showed no clear tendency, so we opted for the slightly shorter variant. We first visited a fiets verhuur (bike rental) and got three bikes. Two of them even came with an automatic bell (well, it was broken and rang on every bump…).

Apart from that the bikes were great, so we set off towards the sea: David navigating up front, the constantly ringing Marc and me behind.

David navigated us unerringly in zigzag through Amsterdam.

Bridge… left… right… bridge… right… oh, no way through here… but over there!

After a few minutes we had found our way out of the city and pedalled straight towards the sea.

![Radtour_Meer](https://media.synyx.de/uploads//2016/06/Radtour_Meer.jpg)

Since the weather was still holding and we were still full of energy, we decided to ride a bit further. Luckily — otherwise we would never have discovered this goat!

![Radtour_Ziege](https://media.synyx.de/uploads//2016/06/Radtour_Ziege.jpg)

Finally arriving in Marken, we took a short break before heading back. Unfortunately it now started to rain a little. Marc and David picked up the pace; I panted along behind.

The goat, meanwhile, had made use of its hut and retreated completely. Fortunately the rain stopped again after a short shower, and we arrived back at the hostel reasonably dry and content and went looking for the rest of our group.

In the meantime, the rest of the synyx travel group was admiring the cultural treasures of the Rijksmuseum, above all Rembrandt, Van Gogh and other great masters. The "drinking games" section and one or two other works also made us smile.

![Museum](https://media.synyx.de/uploads//2016/06/IMG-20160613-WA0014.jpg)

Given the weather, the museum was very well attended, so every now and then you had to fight your way through the crowds in front of the paintings, and the detour situation inside the museum caused some confusion too. After the exhausting and thoroughly demanding museum visit we had earned a refreshment. At that point, however, there was still no herring in sight, so we enjoyed a decent burger followed by a stroopwafel.
Zu diesem Zeitpunkt stießen dann auch die wilden Radler (gerade nochmal rechtzeitig) dazu.",[18,16159,16160],{},"Zusammen ging es dann weiter bei einer klassischen Grachtenfahrt, die sich recht schnell zur einer drei-sprachigen\nGiebelführung durch Amsterdam entpuppte.",[2207,16162,16164],{"id":16163},"die-konferenz-goto-amsterdam-2016","Die Konferenz: goto; Amsterdam 2016",[18,16166,16167,16171,16172,11914],{},[2223,16168],{"alt":16169,"src":16170},"\"IMG_20160614_101157\"","https://media.synyx.de/uploads//2016/06/IMG_20160614_101157.jpg","\nNachdem wir den ersten Tag die Stadt unsicher gemacht hatten, ging es jetzt weiter mit dem eigentlichen Ziel der Reise,\ndie ",[585,16173,16175],{"href":16068,"rel":16174},[589],"goto; Amsterdam 2016",[18,16177,16178],{},"Die Konferenz bot mehrere Tracks in unterschiedlichen Bereichen der Softwareentwicklung (Data, Spring, Legacy to\nMicroservices, JavaScript und Security) aber auch für nicht- technische Themen wurden in eigenen Track wie Post-Agile\noder im Philosophy Track interessante Vorträge gehalten. Alles in allem ein sehr breit ausgestelltes Programm und für\njeden etwas dabei. Zusätzlich waren die Keynotes mit großen Persönlichkeiten aus unterschiedlichsten Bereichen besetz.",[18,16180,16181,16182,16187,16188,16193],{},"Den ",[585,16183,16186],{"href":16184,"rel":16185},"http://gotocon.com/amsterdam-2016/presentations/show_presentation.jsp?oid=7505",[589],"Anfang"," machte Erich Gamma der über\nEntwicklung von Visual Studio Code, eins der wohl größten JavaScript basierten Projekte, berichtete. Es wurde eine\nkomplette IDE auf Basis vom Webtechnologien geschaffen, welche mit Features einer herkömmlichen IDE ausgestattet ist und\nauf allen gängigen Betriebssystemen verfügbar ist aufgrund des auf ",[585,16189,16192],{"href":16190,"rel":16191},"http://electron.atom.io/",[589],"Electron"," basierten\nAufbaus. Ein spannendes Projekt was durchaus mit den richtigen Ansprüchen daher kommt Stichwort: “Eating your own dog\nfood” was den direkten Einsatz der IDE bei Mircosoft mit einschließt. Allerdings wurde man während des Vortrags das\nGefühl nicht los, dass der Vortrag vor allem dazu dient Entwickler für das Microsoft Universum zu gewinnen. Zusätzlich\nhätte Erich Gamma tatsächlich mal eine Runde Live-Demo Beratung bei Josh Long nehmen können. Aber dazu später mehr.",[18,16195,16196],{},"Weiter ging es in sechs nebenläufigen Tracks die immer wieder und zur Mittagszeit etwas länger mit Pausen und sehr\nhochwertigen Essen unterbrochen wurden.",[18,16198,16199],{},"Hier ein paar Berichte aus einzelnen Sessions die uns am meisten begeistert haben:",[18,16201,16202,16203,16208,16209,16214,16215,16219],{},"Der Data Track beschäftigte sich mit Themen wie der Verarbeitung von massenhaften Daten bspw. mit maschinellen\nLernverfahren. Aber auch das Verständnis für Daten und daraus resultierenden Annahmen wurde betrachtet. Dazu\nmachte",[585,16204,16207],{"href":16205,"rel":16206},"https://lukasvermeer.wordpress.com/",[589],"Lukas Vermeer","\nmit ",[585,16210,16213],{"href":16211,"rel":16212},"http://gotocon.com/amsterdam-2016/presentation/Data%20Science%20vs.%20Data%20Alchemy",[589],"Data Sciene vs Data Alchemy","\nden Anfang und zeigte die Fallstricke der Geschichte auf die Aufgrund von Daten oder besser gesagt Beobachtungen gemacht\nwurden und sich allerdings am Ende oft als Trugschluss herausstellten. 
Ein Beispiel wurde anhand von Sauerkraut auf den\nSeeschiffen des 17 und 18 Jahrhunderts gemacht, welches zwar für den Anwendungsfall der Skorbut Krankheit (Armut an\nVitamin C)\nausreichte. ",[2223,16216],{"alt":16217,"src":16218},"\"Screenshot from 2016-06-21 09-19-58\"","https://media.synyx.de/uploads//2016/06/Screenshot-from-2016-06-21-09-19-58.png","\nAllerdings war diese Annahme beispielsweise für die Südpolexpeditionen nicht mehr richtig. So reichte die Vitaminmenge\nder verwendeten Sauerkrautvariante einfach nicht aus um die Versorgung der Expetitionsteilnehmer mit dem lebenswichtigen\nVitamin zu decken.",[11259,16221,16222,16227],{},[18,16223,16224],{},[573,16225,16226],{},"“Science is limited by data",[18,16228,16229],{},[573,16230,16231],{},"Data is limited by engineering”",[18,16233,16234],{},"Diese Kernaussage wurde durch viele Beispiele aus der Geschichte unterstrichen und steht gerade im Hinblick auf\nService-Angeboten wie kaggle.com oder ähnlichen Möglichkeiten zur Datenanalyse im direkten Konflikt, da letztendlich\nquasi jede Aussage aus Daten generiert werden kann auf Basis gewisser Annahmen.",[18,16236,16237,16238,16243,16244,16249],{},"Auch mit vielen Daten und jede Menge Last ging es weiter im Vortrag\nvon ",[585,16239,16242],{"href":16240,"rel":16241},"https://twitter.com/rusmeshenberg",[589],"Ruslan Meshenberg","\nüber",[585,16245,16248],{"href":16246,"rel":16247},"http://gotocon.com/amsterdam-2016/presentation/Microservices%20at%20Netflix%20Scale%20-%20First%20Principles,%20Tradeoffs%20&%20Lessons%20Learned",[589],"Microservices at Netflix Scale",".\nEr berichtete aus der Umstellung der Netflix Architektur auf eine Microservice basierte Architektur und berief sich\ndabei auf einige Grundprinzipien als Quintessenz.",[577,16251,16252,16255,16264,16267],{},[580,16253,16254],{},"Buy vs Build: Zuerst Open Source Software unterstützen und nur im Ausnahmefall auf eigene Lösungen setzen",[580,16256,16257,16258,16263],{},"Services should be stateless: Zur Überprüfung steht\ndie ",[585,16259,16262],{"href":16260,"rel":16261},"http://techblog.netflix.com/2011/07/netflix-simian-army.html",[589],"simian army","bereit, siehe Bild 🙂",[580,16265,16266],{},"Scale out vs. scale up: Da vertikale Skalierung irgendwann an Hardwaregrenzen stößt (mindest schneller)",[580,16268,16269],{},"Automate destructive testing: Vor allem auch in der Produktivumgebung",[18,16271,16272],{},[2223,16273],{"alt":16274,"src":16275},"\"png;base6475ef3cbae089d72b\"","https://media.synyx.de/uploads//2016/06/pngbase6475ef3cbae089d72b.png",[18,16277,16278],{},"Zum Letzen Punkt wurde noch eine Anmerkung gemacht, dass solche Test sinnvoller Weise nur während den Geschäftszeiten\ngemacht werden sollen. Der Grund ist einleuchtend:",[11259,16280,16281],{},[18,16282,16283],{},[573,16284,16285],{},"“Its a bad thing if your services are failing and your team is sleeping or drunk or both”",[18,16287,16288,16289,16294,16295,16300,16301,16306,16307,16312],{},"Auch für die Container-Freunde gab es etwas zu hören. 
",[585,16290,16293],{"href":16291,"rel":16292},"https://twitter.com/saturnism",[589],"Ray Tsang"," von Google\nund ",[585,16296,16299],{"href":16297,"rel":16298},"https://twitter.com/ArjenWassink",[589],"Arjen Wassink"," hielten einen\ngemeinsamen ",[585,16302,16305],{"href":16303,"rel":16304},"http://gotocon.com/amsterdam-2016/presentation/Java-Based%20Microservices,%20Containers,%20Kubernetes%20-%20How%20To",[589],"Vortrag","\nindem eine Java Anwendung auf einem Kubernetes-Cluster mit Docker-Containern live deployt\nwurde. ",[585,16308,16311],{"href":16309,"rel":16310},"http://kubernetes.io/",[589],"Kubernetes"," ist ein Tool von Google, dass zur Verwaltung und Skalierung von Containern\nentwickelt wurde. Ray Tsang zeigte wie flexibel und doch einfach benutzbar Kubernetes ist. Als besonderer Effekt wurde\nder Kubernes-Kluster auf einem eigens mitgebrachten Datencenter betrieben, dass sich aus 5 Raspberry Pi zusammensetzte.\nHiermit sollte demonstriert werden, dass Kubernetes unabhängig von der Umgebung benutzt werden kann. Ob es sich dabei um\nGoogles Cloud Platform oder einen Raspberry Pi-Kluster handelt spielt keine Rolle.",[18,16314,16315,16316,16321,16322,16327],{},"Ein weiterer beeindruckender Vortrag gab es von ",[585,16317,16320],{"href":16318,"rel":16319},"https://twitter.com/starbuxman",[589],"Josh Long"," der mit seinem\nVortrag ",[585,16323,16326],{"href":16324,"rel":16325},"http://gotocon.com/amsterdam-2016/presentation/Cloud%20Native%20Java",[589],"Cloud Native Java"," neue\nGeschwindigkeitsrekorde aufstellte (sowohl auf der Tonspur als auch beim Live Coding). Die vorgestellten Projekte aus\ndem Spring Ökosystem zur Orchestrierung von Microservice-basierten Anwendungen wurden live an der Tastatur erarbeitet.\nDabei wurde ein Einblick geliefert wie eine mögliche Architektur einer Microservice-basierten Anwendung aussieht.\nAußerdem stellte er die unter dem Dach von Spring Cloud zusammengefassten Module vor. Dabei sei allerdings erwähnt, dass\nviele der Arbeiten durch Netflix veröffentlicht wurden und Einzug in das Ökosystem von Spring gehalten haben. Trotz der\nhohen Geschwindigkeit war auch noch Zeit für Humor:",[11259,16329,16330],{},[18,16331,16332],{},"“everytime you do field injection, a unit test dies.”",[18,16334,16335],{},"Neben vielen technischen Talks überzeugte die goto; Amsterdam Konferenz vor allem mit dem Philosophie Track.",[11259,16337,16338,16343],{},[18,16339,16340],{},[573,16341,16342],{},"“Jeder ist besser als ich. Ich bin nicht so gut wie andere denken und bald werden sie es herausfinden.”",[18,16344,16345],{},"(Imposter Syndrom)",[18,16347,16348,16353,16354,16359,16360,16365],{},[585,16349,16352],{"href":16350,"rel":16351},"https://twitter.com/nativewired",[589],"Gitte Klitgaard’s"," Vortrag über\ndas ",[585,16355,16358],{"href":16356,"rel":16357},"https://de.wikipedia.org/wiki/Hochstapler-Syndrom",[589],"Imposter Syndrom"," gilt es wohl hervorzuheben. Sie berichtete\nüber negative Konsequenzen wie Burn Out als auch über Chancen und positive Aspekte wie Individualität und die stetige\nSelbstverbesserung. Auch eine Folie spendiert bekam\nder ",[585,16361,16364],{"href":16362,"rel":16363},"https://de.wikipedia.org/wiki/Dunning-Kruger-Effekt",[589],"Dunning-Kruger-Effekt",", welcher die Selbstüberschätzung von\nAnfängern beschreibt. 
Traf einmal ein ein extrovertierte, seblbstbewusster Anfänger auf einen Experten mit Imposter\nSyndrom…Muss man mal drüber nachgedacht haben.",[18,16367,16368],{},[2223,16369],{"alt":16370,"src":16371},"dunning-kruger-effekt","https://web.archive.org/web/20160609113938if_/http://terrycolon.com/quotes/a-e/D-Keffect2.gif",[18,16373,16374],{},"Insgesamt eine runde Veranstaltung, welche durch viele spannende Vorträge in einer tollen Location und mit sehr gutem\nEssen glänzte.",[18,16376,16377],{},[2223,16378],{"alt":16379,"src":16380},"\"RYJ-8J34F\"","https://media.synyx.de/uploads//2016/06/RYJ-8J34F.jpg",[18,16382,16383],{},"Eins ist gewiss wir kommen wieder! …und nicht bloß wegen dem verpassten Hering 😉",{"title":48,"searchDepth":86,"depth":86,"links":16385},[16386,16387,16388],{"id":16079,"depth":86,"text":16080},{"id":16107,"depth":86,"text":16108},{"id":16163,"depth":86,"text":16164},[613],"2016-06-23T09:37:39","Vom 13. bis 15.06.2016 waren wir zu siebt in Amsterdam auf der goto; Amsterdam.\\nZunächst gibt es einen kleinen Reisebericht zu lesen auf den dann ein paar Impressionen aus den einzelnen Sessions und\\nTalks folgen. Wer direkt zu den inhaltlichen Schwerpunkten unserer Reise springen möchte bitte hier entlang.","https://synyx.de/blog/synyx-goto-amsterdam/",{},"/blog/synyx-goto-amsterdam",{"title":16054,"description":16396},"Vom 13. bis 15.06.2016 waren wir zu siebt in Amsterdam auf der goto; Amsterdam.\nZunächst gibt es einen kleinen Reisebericht zu lesen auf den dann ein paar Impressionen aus den einzelnen Sessions und\nTalks folgen. Wer direkt zu den inhaltlichen Schwerpunkten unserer Reise springen möchte bitte hier entlang.","blog/synyx-goto-amsterdam",[16399,16400,5846],"amsterdam","goto","Vom 13. bis 15.06.2016 waren wir zu siebt in Amsterdam auf der goto; Amsterdam. Zunächst gibt es einen kleinen Reisebericht zu lesen auf den dann ein paar Impressionen aus den…","0iW4jrfW1wl3Ta6EPCFAqG6w8AEHQsIlfG32M2tIsa4",{"id":16404,"title":16405,"author":16406,"body":16408,"category":16506,"date":16507,"description":16415,"extension":617,"link":16508,"meta":16509,"navigation":499,"path":16510,"seo":16511,"slug":16412,"stem":16512,"tags":16513,"teaser":16517,"__hash__":16518},"blog/blog/werte-diskurs-bei-synyx.md","Werte-Diskurs bei synyx!",[16407],"meseck",{"type":11,"value":16409,"toc":16504},[16410,16413,16416,16419,16424,16427,16438,16443,16446,16449,16455,16458,16461,16464,16467,16470,16473,16476,16485,16494],[14,16411,16405],{"id":16412},"werte-diskurs-bei-synyx",[18,16414,16415],{},"Klingt langweilig und abgedroschen? Nicht bei uns!",[18,16417,16418],{},"Gegen Ende des letzten Jahres entfachte sich eine spannende Diskussion über Werte. Die Idee dahinter: ein gemeinsames\nWertebild für unser Unternehmen zu finden. Denn synyx ist die letzten Jahre kontinuierlich gewachsen, weswegen sich die\nalten Unternehmenswerte verändert haben. Es war uns wichtig zusammen unsere Werte zu reflektieren.",[18,16420,16421],{},[27,16422,16423],{},"Doch wofür diesen ganzen Aufwand?",[18,16425,16426],{},"Dafür gibt es verschiedene Gründe:",[12474,16428,16429,16432,16435],{},[580,16430,16431],{},"In der täglichen Arbeit miteinander werden zahlreiche Entscheidungen getroffen. Die Grundlage hierfür sind konkrete\nWerte. Müssen wir eine unternehmerische Entscheidung für oder gegen einen Kunden treffen, prüfen wir individuell, ob\nder Kunde in unser Wertesystem passt. Da uns beispielsweise soziale Verantwortung am Herzen liegt, arbeiten wir nicht\nmit Waffenunternehmen zusammen. 
   The values discussion creates transparency in such decision processes.
2. Not only when choosing new customers, but also new employees, the question arises whether their values match ours. Very different values can lead to conflicts within the company.
3. The assumption that company values, once formulated, stay that way forever contradicts our experience. Over time these values change under various influences. We examine at regular intervals what this change means for us and whether it is good or bad, because only then can we proactively change course. If we ignore this, we run the risk that values become entrenched that harm our company. Values also determine how we behave towards each other and how we interact; the sum of this behaviour is reflected in the company culture. Seen this way, values are the foundation of every culture. If, for example, a culture of dissatisfaction emerges, this can often be a sign that values have changed.

**The values discourse, synyx style:**

Typically such discussions take place behind closed manager doors, and the result is then presented to the whole staff. We chose a different path – one that is very time-consuming because of its intense and lively discussion. A discussion about values offers a good opportunity for everyone at synyx to take part, so everyone has the chance to stand behind a shared set of values.

This very approach to the values discourse shows part of the values we live here at synyx: individuality, participation and openness. We are different and we stand by it. It is important to us that everyone can be heard, and open, honest communication is the cornerstone of our collaboration.

Now the interesting question: **how did we run the values discourse at synyx?**

Quite simply: pragmatically. With everyone sitting down at one table and discussing together, an open discussion space could unfold. To keep things within bounds, the discussion was accompanied by moderators. Everyone was allowed to write down the values that matter to them at synyx; these were collected as we went. Since some values were similar or identical, they were then clustered. To cluster them sensibly, however, the meaning of each value had to be clarified, because values like respect, personal responsibility or commitment leave a lot of room for interpretation. Clarifying the meaning is both necessary and challenging, because only then do abstract values become a tangible, concrete understanding of values. A large part of the value created by the values discourse happens right here.

The result of this multi-hour discussion: from over 100 sticky notes with values, 30 value clusters emerged.

A few days later we continued the workshop. In the second part we focused on the 30 clusters in order to "boil them down". The difficulty of this work was having to let go of some of the values. The ability to jointly concentrate on the essential values and to agree on them was the real benefit of this discussion. In the end nine value clusters crystallised. These nine values were then prioritised together, because depending on the decision, values have to be weighed against each other. For example, a decision can be made in favour of customer satisfaction while temporarily limiting employee enthusiasm. Prioritisation brings transparency and clarity to this process.

So that valuable results from the workshops do not get lost, the values were written up in a final step. For each value a meaningful sentence was formulated that makes it tangible. Of course these values should not moulder in some drawer but be visible to everyone, which is why we will have them visualised on one of our office walls. The first draft can be viewed at our place (see photos below).

Drop by and get inspired 🙂

For anyone interested in further reading:

Book: Michael Loebbert, Kultur entscheidet: kulturelle Muster in Unternehmen erkennen und verändern, Springer Gabler 2015

Paper: Dr. Bernhard v. Guretzky, [Werte im Unternehmen](http://www.community-of-knowledge.de/fileadmin/user_upload/attachments/Werte_im_Unternehmen.pdf), 2006

PhD thesis: Sandra Niedermeier, [Wertebildung im Unternehmen: Theoretische Grundlagen und Implementation](https://edoc.ub.uni-muenchen.de/17504/1/Niedermeier_Sandra.pdf), 2014

![werte_01](https://media.synyx.de/uploads//2016/06/werte_01.jpg) ![werte](https://media.synyx.de/uploads//2016/06/werte.jpg)

# Our days @ para//el 2016
Last week Stefan and I took part as guests in the [para//el conference](http://parallelcon.de/) in Heidelberg. The actual [program](http://parallelcon.de/programm.php) was separated into 2 keynotes, one per day, and 36 talks, 18 per day, three at a time (in parallel). Right at the beginning it was said that we were two out of 150 participants, which, I would say, leads to a nice atmosphere. It wasn't difficult to join a discussion or meet other people during the breaks, and, quite contrary to other conferences, I didn't experience any large queueing at lunch either. Why? Well, someone had figured out that a table, if placed correctly, offers space for more than one line at the buffet, which is quite good news! 🙂

We both really enjoyed the days viewed from that angle, so let's talk about the conference and the talks themselves. It becomes visible that concurrency or parallelism – a detail that people tend to interpret quite differently or hold different opinions about – is no longer considered something that only concerns languages like C/C++ or specific corners of IT like embedded programming or [HPC](https://en.wikipedia.org/wiki/Supercomputer), but rather something that can be discussed in a broader sense.

At synyx we program most of the time in Java, and talks had been scheduled very nicely with little or no overlap, which is true for other languages as well. My point here is that concurrency on the JVM is getting more and more attention nowadays. When did you last read about the memory model of the JVM? Sometimes it makes even more sense to listen to sessions that do not cross your everyday borders, just to see how problems get solved or patterns applied by different people and divisions. What is a cache line, volatile, atomic? What are the implications for multi-core or multi-socket environments, and what if we put a JVM in between? A slight change to a system – for instance a change to the compiler, the JVM or even a different CPU model – might have an impact due to underlying rules and optimization strategies, in both hardware and software. Visibility constraints might be a good keyword here, and it was quite interesting to hear about so many different aspects again. The speakers did a good job of painting the picture with smaller examples: a + b + c might not be c + b + a under certain circumstances. While this problem is not related to parallelism at first, but rather to [numerical precision errors](https://en.wikipedia.org/wiki/Numerical_analysis), it might be more visible in those environments and reveals the great spectrum of possible talks everyone could attend.
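To make the a + b + c example concrete: floating-point addition is not associative, so the evaluation order can change the result. A tiny sketch (plain JavaScript here for brevity; Java doubles behave the same way):

```javascript
// Floating-point addition is not associative: each partial sum is rounded,
// so the order of evaluation decides which rounding errors accumulate.
const a = 0.1;
const b = 0.2;
const c = 0.3;

console.log((a + b) + c);                 // 0.6000000000000001
console.log(a + (b + c));                 // 0.6
console.log((a + b) + c === a + (b + c)); // false
```

Once several threads or nodes sum partial results in a non-deterministic order, this effect becomes visible even though no single step is "wrong".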
So, when I go through all of the sessions in my mind again, there was one challenging question, quite a philosophical one maybe, at least to me.

*In a concurrent world, how much precision would you be willing to relinquish for a correct view of the shared world?*

I would like to keep this open for now; you may want to think about it on your own, and you may also want to anticipate the outer rim of IT. I can really recommend reading [Java Concurrency in Practice](http://jcip.net/) by Brian Goetz et al. to everyone who couldn't join the para//el in 2016 and wants to know more about the concurrent world. Threads are not evil if used with the right abstraction, and pooling helps to minimize the initialization costs.

In general, there is only one thing that I missed among all the talks about performance, correctness and theory: testing. I'd really like to gain more insight into how people verify concurrent code. How fine-grained should we formulate tests? What can we say about tools, software, patterns or even a simple setup? It might be even more complex for the embedded world.

Well, it might be relevant for 2017, it might not – we will see.

Thanks to everyone who made the conference what it was in 2016!
# springboot & reactjs #2 | progressive enhancement based on list sorting

This is the second article of a springboot & reactjs article series about server side rendering and progressive enhancement. In the [first article](https://synyx.de/2016/03/universal-webapp-development-with-spring-boot-react/) we have learned how to render a ReactJS app on the server with Nashorn. However, actually it is not really an "app" yet. Currently we just see a static list of awesome products…

Today we will implement the sorting feature that should work with a plain html form submit as well as with an Ajax request and client side rendering. So the app is progressively enhanced with JavaScript o/

---

## springboot & reactjs article series

1. [server side rendering](https://synyx.de/2016/03/springboot-reactjs-server-side-rendering) ✅
2. progressive enhancement based on list sorting 🆕
   - [HTML form and server side rendering](#html-form-and-server-side-rendering)
   - [Enhance the client](#enhance-the-client)
   - [Make the back button work again](#make-the-back-button-work-again)
   - [What do we have learned so far](#what-do-we-have-learned-so-far)
3. improving developer experience
4. lessons learned

---

While the JavaScript solution with client side rendering results in faster rendering and therefore a better user experience, it also has its costs. We have to manage the browser url by ourselves. Furthermore we have to implement the browser's back button feature /o

But let's take one step after another…

**tl;dr**

[project source code is available on github](https://github.com/synyx/springboot-reactjs-demo)

## HTML form and server side rendering

Before we can even start thinking about the back button we have to implement the sorting feature. So we create a plain html form first that fires a good old get request.

The ProductFilterItem is a simple input field of type radio(button). For the moment the sorting should only consider one attribute, therefore a radiobutton group is the way to go. So every input is defined with `name="sort"` and a label to increase the clickable area.
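The component code itself lives in the linked repository; as a rough sketch (component names follow the text, everything else is an assumption), the form could look like this:

```javascript
import React from "react";

// One radio button of the "sort" group, wrapped in a label to enlarge the
// clickable area. defaultChecked mirrors the sort value of the request, so
// the server-rendered markup already shows the current selection.
const ProductFilterItem = ({ value, label, sortBy }) => (
  <label>
    <input type="radio" name="sort" value={value} defaultChecked={sortBy === value} />
    {label}
  </label>
);

// A plain old HTML form. Without an action attribute the GET request goes to
// the current URL, e.g. /?sort=price – no JavaScript required at all.
const ProductFilter = ({ sortBy }) => (
  <form method="get">
    <ProductFilterItem value="name" label="name" sortBy={sortBy} />
    <ProductFilterItem value="price" label="price" sortBy={sortBy} />
    <button type="submit">sort</button>
  </form>
);

export default ProductFilter;
```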
In the previous blog post the ProductList was the only component to render and therefore the entry in **main.js**. But the new **ProductFilter** is not part of the list. The **ProductList** reacts to the filter parameter set by the user. So we have to create an **App** container that combines our awesome ProductList and ProductFilter components.

Since we have a container now to combine the ProductList and the ProductFilter components, we also have to adjust the `global.renderServer` function. First we add a second parameter `sortBy` to be able to render the selected radio button. Then we must render the new App container instead of the plain ProductList.
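The adjusted entry point might then look roughly like this (a sketch; the App container simply composes ProductFilter and ProductList, and `Java.from` converts the `java.util.List` handed over by the controller into a JavaScript array, as covered in the first part of the series):

```javascript
import React from "react";
import ReactDOMServer from "react-dom/server";
import App from "./App";

// Called by Nashorn via React.java. The second parameter carries the selected
// sort attribute so the matching radio button is pre-checked on the server.
// Java.from is a Nashorn built-in and is only reached during server rendering.
global.renderServer = (products, sortBy) =>
  ReactDOMServer.renderToString(
    <App products={Java.from(products)} sortBy={sortBy} />
  );
```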
That's it for the frontend part!

Next we need to extend the backend controller to process the `sort` request parameter defined in the ProductFilter form and use it to sort the product list.

Additionally the `React#renderProducts` method must be extended, too, of course.

And that's it with the backend part as well!

Now build the frontend (`npm run build`), start the Spring Boot app (`./gradlew bootRun`), open your browser on `http://localhost:8080` and start sorting the awesome product list 🙂

![awesome_productlist_sorting](https://media.synyx.de/uploads//2016/04/awesome_productlist_sorting.gif)

## Enhance the client

So far our awesome product list is fully functional. Let's recap what we can do now.

We are able to:

- see the awesome product info
- sort the awesome products by name or price
- use the browser's back and forward button (static site!)
- bookmark every single view

As the next step we want to improve the user experience with AJAX requests and client side rendering. This gives the user much quicker feedback than requesting the whole html document.

To fetch data dynamically from the server we have to prevent the native form submit and take over control ourselves. React provides props like `onClick`, `onChange` or `onSubmit` to register event handlers (see the [React component docs](https://facebook.github.io/react/docs/component-specs.html#lifecycle-methods)). So we simply have to register a handler for the form submit. The handler does nothing but prevent the native behaviour and inform the consumer of the ProductFilter about the submit.

You may ask why we are subscribing our submit handler via `onSubmit` on the form and not the `onClick` hook on the submit button. Well, actually we could listen to the button click. But then we would have to keep track of all form data by ourselves… And the html form element already provides all this data as an [HTMLFormControlsCollection](https://developer.mozilla.org/en-US/docs/Web/API/HTMLFormElement/elements).
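Building on the form sketch above, the enhanced ProductFilter could register the handler like this (again a sketch; how exactly the selected value is handed to the consumer is an assumption – the real code is in the repository):

```javascript
import React from "react";

class ProductFilter extends React.Component {
  constructor(props) {
    super(props);
    this.handleSubmit = this.handleSubmit.bind(this);
  }

  // Prevent the native form submit and hand the chosen sort attribute to the
  // consumer. event.target is the form, so its elements collection
  // (an HTMLFormControlsCollection) already contains all form data.
  handleSubmit(event) {
    event.preventDefault();
    this.props.onSubmit(event.target.elements.sort.value);
  }

  render() {
    return (
      <form method="get" onSubmit={this.handleSubmit}>
        {/* the radio buttons named "sort" and the submit button as before */}
        {this.props.children}
      </form>
    );
  }
}

export default ProductFilter;
```

Without JavaScript the form still degrades gracefully to the plain GET request from the previous section.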
Since we use React's API for DOM event handling, we have to render our awesome product list on the client, too. We only have server side rendering at this moment, remember? 😉

So additionally to the `renderServer` function used by Nashorn we need a second function `window.renderClient` that we have to call on the client side (browser), as we will see later in this tutorial.

Next we have to add the initial rendering to the index.html template. Of course, we must call `window.renderClient` with the same data as on the server. React prints a nice error message on the browser console if the data differs (meaning the client side rendering would result in a different DOM structure than the existing one).

Back in the Java backend we have to inject the initial product list and the sortBy value into the server side model of the `ProductController.java` class. Additionally we add a second endpoint to provide the sorted product list as json.

That's it!

A form submit now fetches the minimal data from the server and the client takes care of the rendering. Awesome, right? If only this queasy feeling wouldn't be there… Right… The browser url is not changing anymore /o And if it couldn't be worse… without url changes we also lost the power of the glorious back button.

## Make the back button work again

Okay, first we should face the browser url. With HTML5 we've gained the [window.history](https://developer.mozilla.org/en-US/docs/Web/API/History_API) api, which is supported by all modern browsers. Changing the url is as simple as pushing the new state into the history with [window.history.pushState](https://developer.mozilla.org/en-US/docs/Web/API/History_API#Adding_and_modifying_history_entries).

Next we want to listen to the browser's back and forward buttons. This can be implemented by subscribing to the `popstate` event. The subscription is done within `componentDidMount` since we only want to subscribe in the browser environment. The `constructor` and its counterpart `componentWillMount` are both called on the server side when creating the static html markup.
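Putting the pieces together, the App container could manage the history roughly like this (a sketch: the JSON endpoint, the URL shape and the use of `fetch` are assumptions made for illustration – the actual implementation is in the repository):

```javascript
import React from "react";
import ProductFilter from "./ProductFilter";
import ProductList from "./ProductList";

class App extends React.Component {
  constructor(props) {
    super(props);
    this.state = { products: props.products, sortBy: props.sortBy };
    this.handleSort = this.handleSort.bind(this);
    this.handlePopState = this.handlePopState.bind(this);
  }

  // Only runs in the browser – never during server side rendering.
  componentDidMount() {
    window.addEventListener("popstate", this.handlePopState);
  }

  componentWillUnmount() {
    window.removeEventListener("popstate", this.handlePopState);
  }

  // Back/forward button: restore the state that was pushed for this entry.
  handlePopState(event) {
    if (event.state) {
      this.setState(event.state);
    }
  }

  // Form submit: fetch the sorted list as JSON, render it on the client
  // and push the new URL into the browser history.
  handleSort(sortBy) {
    fetch(`/products?sort=${sortBy}`)
      .then(response => response.json())
      .then(products => {
        const nextState = { products, sortBy };
        this.setState(nextState);
        window.history.pushState(nextState, "", `/?sort=${sortBy}`);
      });
  }

  render() {
    return (
      <div>
        <ProductFilter sortBy={this.state.sortBy} onSubmit={this.handleSort} />
        <ProductList products={this.state.products} />
      </div>
    );
  }
}

export default App;
```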
Finally we made it 🙂

Go on, rebuild the frontend (`npm run build`), start the Spring Boot app (`./gradlew bootRun`), open *http://localhost:8080* and admire our awesome progressively enhanced product list. Functional without JavaScript, and even better with JavaScript enabled.

## What do we have learned so far?

- use a plain HTML `<form>` element and enhance it with JavaScript and `event.preventDefault`
- use `window.history` and the `popstate` event to handle the browser back/forward button on the client
- manually rebuilding and reloading the ReactJS app still sucks (autoreload would be cool, right)

![progressive_js](https://media.synyx.de/uploads//2016/04/progressive_js.gif)

## The next steps will be

- using webpack to enhance the developer experience
- lessons learned

**Stay tuned and keep learning!**

# springboot & reactjs #1 | server side rendering

This is the first article of a series about server side rendering and progressive enhancement. We will implement a product list that can be sorted by two parameters. Furthermore the app will be progressively enhanced, meaning the html document is rendered on the server and javascript will just enhance the app on the client if possible.

---

## springboot & reactjs article series

1. server side rendering ✅
   - [Why Java for the backend?](#why-java)
   - [Why ReactJS for the client?](#why-reactjs)
   - [Spring Boot Initializr](#springboot)
   - [Backend](#backend)
   - [Frontend](#frontend)
   - [Running the app](#running-the-app)
   - [What we have learned so far](#what-we-have-learned-so-far)
2. [progressive enhancement based on list sorting](https://synyx.de/2016/04/springboot-reactjs-progressive-enhancement-based-on-list-sorting/) 🆕
3. improving developer experience
4. lessons learned

---

Today we are going to create an awesome product list while using progressive enhancement to provide the best user experience possible on each device. Progressive enhancement is a method in web development that puts the focus on content first. The content is then enhanced with dynamic features (JavaScript) and layout (css). To provide content even with disabled JavaScript, the page has to be rendered on the server. For reasons explained below we use Java & Spring on the backend and ReactJS on the client side.

But why do we even want to make the effort of developing a universal web application? And even a progressive one?
Of course this is overhead that must be handled, and we have to implement and maintain more APIs, as we will see later. However, in my opinion the user experience is worth this effort.

**Benefits of universal applications**

- thanks to server side rendered markup
  - the app is fully functional from the start
  - the app is usable instantly
- JavaScript just enhances the features
  - ajax calls without full page reloads are super fast

**tl;dr**

[project source code is available on github](https://github.com/synyx/springboot-reactjs-demo)

## Why Java for the backend?

To start with our little project we first have to think about the technologies we want to use. At synyx we mainly use Java for the backend, and over the last years we have built up a lot of knowledge around the Spring ecosystem as well.

So, to answer the question:

- team knowledge (Spring, …)
- battle tested solutions (Spring Security, Spring MVC, …)

## Why ReactJS for the client?

Well, personally I am a fan of react and its ecosystem. That's it! 😎

Okay… actually there are good reasons to use react. React provides a simple API to develop UI components and it works well with reactive architectures like Flux and its successor [redux](https://web.archive.org/web/20160411023634/https://egghead.io/series/getting-started-with-redux).

Furthermore it supports server side rendering quite well. *(afaik Angular 2 and Ember could also be used, or cyclejs, or …)*

> **Disclaimer**
>
> I will not explain spring, react or $technology. Nor will I explain Java or JavaScript, es2015 syntax in particular. There already are excellent articles out there on the web, like [how to start with react](http://javascriptplayground.com/blog/2016/02/the-react-webpack-tooling-problem) (without using webpack). Additionally I recommend having a look at the official documentation for [react](https://facebook.github.io/react/docs/thinking-in-react.html) as well as [spring](https://spring.io/docs), of course.

## Spring Boot Initializr

At first we're going to generate a bootstrap project with the awesome [spring starter](https://start.spring.io) web interface. We use gradle as build system, thymeleaf as template engine to render our views on the server, and good old spring-web.
Handlebars would also be an option as template engine, but it is not supported by spring initializr.

![springboot-initializr](https://media.synyx.de/uploads//2016/02/springboot-initializer.png)

## Backend

Starting with the backend, we have to create the following files.

**index.html**

The html template is as simple as it could be. We just need a div that acts as container for our ReactJS app. `th:utext="${content}"` is thymeleaf specific and injects the content attribute of the view model as an unescaped string.

**React.java**

In React.java we are going to instantiate the Nashorn engine to be able to interpret JavaScript code which we will add later. Nashorn is just a runtime environment for JavaScript on the JVM. It neither provides a `window` object nor a `console` for logging. But since the latter is required by ReactJS, we have to load a `nashorn-polyfill.js` file before anything else.

We are loading our JavaScript sources into Nashorn with `nashornScriptEngine.eval("load('...')")`. This is the same as including a script tag in an html document.

However, we could also call `nashorn.eval(new InputStreamReader(...))` to load the JavaScript files instead of using the Nashorn specific *load* function. But we would lose the ability to debug the JavaScript code while running in Nashorn (at least with IntelliJ). Which could be… useful 😉

Furthermore we have to implement a method *renderProducts* which will invoke a global *renderServer* function defined in *app.bundle.js* to create the rendered html string.

**nashorn-polyfill.js**

The polyfill for nashorn has to define a **global** variable (for reasons I will explain later) and the already mentioned **console**. *print* is a Nashorn function that logs on stdout.

**ProductController.java**

The ProductController is responsible for getting the products and for setting the rendered html string as the `content` attribute of the view model.

Additionally we need a [Product.java](https://github.com/synyx/springboot-reactjs-demo/blob/031a52fee5cc49c91988227b6b29b9857e5fed86/src/main/java/de/synyx/tutorials/spring/reactjs/demo/product/Product.java) POJO and a [ProductRepository.java](https://github.com/synyx/springboot-reactjs-demo/blob/031a52fee5cc49c91988227b6b29b9857e5fed86/src/main/java/de/synyx/tutorials/spring/reactjs/demo/product/ProductRepository.java). I think this is very straightforward and code snippets are obsolete here.

## Frontend

With the backend part ready we can start with the frontend.

At first we have to do a small setup to enable es2015 compilation and module bundling with webpack. Webpack is actually not required but eases our developer lives immensely. Babel-cli could also be used with small adjustments in *React.java*.

```
$ npm init
$ npm i --save-dev webpack babel-core babel-loader babel-preset-es2015 babel-preset-react react react-dom
```

**webpack.config.js**

Next we configure webpack to generate a bundle of our JavaScript files including the ReactJS library and our app business logic. Please note **output.filename**, which is the file loaded by React.java.

Webpack can then simply be used to create the bundle via an npm task.

```
// package.json
"scripts": {
  "build": "webpack"
}
```
**ProductList.js**

To start with the UI components we are going to create the ProductList as the main component. It will be, drum roll, responsible for displaying a list of products which have a name and a price (remember *Product.java*?). Therefore we implement a function that takes our products and returns the representational markup. To avoid `"Cannot read property 'map' of undefined"` type errors we simply assign an empty array to the products by default.

**main.js**

Next we need the entry point of our ReactJS app to define the **renderServer** function invoked by Nashorn. Remember the **global** variable set in *nashorn-polyfill.js*? We use this variable now to "export" our *renderServer* function. If you are familiar with the NodeJS environment, you already know that the *global* object is the equivalent of the *window* object available in the browser. And Nashorn is our equivalent of NodeJS 😉

## Running the app

That's it!

Now we can run our first universal server side rendered springboot application to admire our graceful product list. Go on, run

```
$ npm run build
$ ./gradlew bootRun
```

open your browser and load `http://localhost:8080`

Just…

to see…

a wonderful stacktrace…

```
jdk.nashorn.internal.runtime.ECMAException: TypeError:
[de.synyx...Product@553287f8, Product@65ae29e6] has no such function "map"
    at jdk.nashorn.internal.runtime.ECMAErrors.error(ECMAErrors.java:58) ~[nashorn.jar:na]
    at jdk.nashorn.internal.runtime.ECMAErrors.typeError(ECMAErrors.java:214) ~[nashorn.jar:na]
    at jdk.nashorn.internal.runtime.ECMAErrors.typeError(ECMAErrors.java:186) ~[nashorn.jar:na]
    at jdk.nashorn.internal.runtime.ECMAErrors.typeError(ECMAErrors.java:173) ~[nashorn.jar:na]
```

The reason is that Nashorn interprets Java objects as, surprise, Java objects. As you remember, our ReactJS component `<ProductList />` expects a list of products (actually a JavaScript array). But currently the type of products is a `java.util.List`, which doesn't have the map method. Note the datatype of products in the image below.

![nashorn-debugging](https://media.synyx.de/uploads//2016/03/nashorn-debugging.png)

The Nashorn built-in `Java.from` comes to the rescue:

> "Given a Java array or Collection, this function returns a JavaScript array with a shallow copy of its contents"

So our renderServer function defined in *main.js* must be extended to:
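The snippet itself is in the repository; sketched out, the extended entry point (together with a simplified ProductList) could look like this:

```javascript
import React from "react";
import ReactDOMServer from "react-dom/server";

// Simplified ProductList: defaulting to an empty array avoids the
// "Cannot read property 'map' of undefined" error mentioned above.
const ProductList = ({ products = [] }) => (
  <ul>
    {products.map(product => (
      <li key={product.name}>
        {product.name} – {product.price}
      </li>
    ))}
  </ul>
);

// "Exported" via the global object defined in nashorn-polyfill.js.
// Java.from is a Nashorn built-in that copies the java.util.List passed in by
// React.java into a real JavaScript array, so .map() works as expected.
global.renderServer = products =>
  ReactDOMServer.renderToString(<ProductList products={Java.from(products)} />);
```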

---

# Visual Thinking – synyx sketcht

*Originally published at https://synyx.de/blog/visual-thinking-synyx-sketcht/ (2015-12-14).*

Instead of going to yet another conference for training, we had a different idea this year: an in-house workshop on ["Visual Thinking"](http://www.frauhoelle.com/sketchnotes/) with Tanja, alias [@frauhoelle](https://twitter.com/frauhoelle). The goal of the workshop was to familiarise the participants with visual language and thereby improve our communication.

Since there was great interest in the workshop – 11 participants – we spread the topics over several days: on day one we had a joint introduction covering fundamentals, techniques and symbols. Day two was reserved for our "moderators" to specialise, and on day three we "techies" got another turn.

![sketch_kombiniert](https://media.synyx.de/uploads//2015/12/sketch_kombiniert-e1450077980406.jpg)

As software developers we are used to abstracting heavily and reducing things to the "essentials". We prefer text, rectangular boxes and straight lines to explain our ideas to others; we have practically unlearned how to speak visually. As a result those ideas often become hard to grasp and don't really stick in people's memory. We wanted to counter this by refreshing our creative skills and to make our communication – whether with customers or colleagues – more visual and more memorable.

In our view the real goal of the workshop was to motivate the participants to let go of text, of the familiar and above all of the digital, and to find the courage to be a child again and support their explanations with drawings.

On all workshop days the focus was on playful learning and participation, so there was always something going on and it never got boring. Even though there was of course some theory, we were already sketching energetically within the first hour. That was possible above all because Tanja had brought heaps of pens and material which were always within reach, and because she constantly supported us with advice, hands-on help and motivation.

![Jeder macht mit](https://media.synyx.de/uploads//2015/12/alle_am_tisch-e1449841973591.jpg)

On the first day we playfully learned the basics: dots, lines, triangles, rectangles and circles. In the afternoon these grew into more complex symbols, up to our own symbol alphabet.
We learned to convey our ideas with as few strokes as possible and to achieve a big effect with small details such as shadows. Along the way, various exercises woke up our creativity and gave us confidence.

On the second day the basics and our symbols were refined further and finally complemented with figures showing gestures and emotions, plus lettering, frames, page layout and many small tips and tricks. The exercises focused more on visualising complex topics, talks or stories.

Through the workshop we got to know different ways to improve our communication with visual elements. Above all, we gained the courage and the confidence to actually use them – whether on a flip chart, on paper or on post-its.

You can already see it around the synyx office: flip charts and other artefacts have suddenly become more colourful, spiced up with the power of pictures, and many participants are busily improving their skills.

![Eindrücke aus dem Büro](https://media.synyx.de/uploads//2015/12/buero-e1449847979285.jpg)

All in all we can warmly recommend Frau Hölle and her workshops: we had a lot of fun and took away a lot of knowledge and courage.

From now on we always carry a shadow pen in our pocket!

**To finish, a few impressions from the workshop…**

![Alle mit dabei](https://media.synyx.de/uploads//2015/12/brainstorm-e1449841965214.jpg)

![Kreatives Chaos](https://media.synyx.de/uploads//2015/12/chaotisch.jpg)

![Jeder macht mit](https://media.synyx.de/uploads//2015/12/alle_am_tisch-e1449841973591.jpg)

![spassdabei](https://media.synyx.de/uploads//2015/12/spassdabei-e1449842435714.jpg)

![Ergebnis einer Übungsaufgabe](https://media.synyx.de/uploads//2015/12/whiteboard-e1449841924365.jpg)

![Fazit](https://media.synyx.de/uploads//2015/12/wasnehmenwirmit-e1449841945934.jpg)

---

# IoTCon 2015 Berlin

*Originally published at https://synyx.de/blog/iotcon-2015-berlin/ (2015-09-18).*

From August 31st to September 2nd 2015 I attended the [Internet of Things Conference](https://iotcon.de/2015/) at the nHow hotel in Berlin. Monday was a workshop day, while Tuesday and Wednesday were the actual conference days with talks and keynotes. There was a second workshop day on Thursday, in which I did not participate.

Below the fold you will find a (not *that*) short summary of the workshops and talks I attended.

## Monday: Workshop Day

### Game of Things

A workshop playing the *Game of Things*: a brainstorming game for coming up with new ideas for – well – *things* (to connect to the Internet). The players split into teams and the game is split into three phases:

**Phase 1:** *Finding ideas.* There is a *business driver* set by the game master or – usually – the employer. All teams work with the same business driver. Next, each team pulls one or more *technology cards* from the deck and an *event card* that serves as a further constraint. Now the team tries to combine the business driver with their technologies – within the constraint given by the event card, if possible – to come up with ideas.

**Phase 2:** *Auctioning ideas.* Each team has 30 seconds to present each of their ideas, then the other teams can *bid* on the ideas. The team with the highest bid receives the idea, the team that came up with it receives the money.
Money earned in an auction can be used in later rounds to buy ideas.

Phases 1 & 2 are repeated several times.

**Phase 3:** *Finale.* Each team ranks the ideas they bought and chooses the best one, possibly combining several ideas into one. They then present their idea and in the end all players – no longer in teams – vote on the ideas to find the best idea of the game. The team that came up with the idea and the team that bought it win the game.

The winning idea of our game was: *Seamless navigation with virtual graffiti as direction markers (via smartglasses).*

The game is free and open source and can be downloaded [here](http://www.maibornwolff.de/game-of-things-download) (German only).

### Hands on IoT

This was a workshop for using the [Arduino Yún](https://www.arduino.cc/en/Main/ArduinoBoardYun) to connect simple electronics to the Internet. In this case, we used a pushbutton to send tweets.

The Arduino Yún is an Arduino micro-controller with an embedded Linux system on board, which can use Wifi or Ethernet to connect to the Internet. While we used a simple pushbutton, it would be easy to use various sensors to send data over the Internet.

Instead of talking to the twitter API directly we used [Temboo](http://temboo.com/), an IoT API service which makes it very easy to connect electronics to a diverse set of web services. It can even generate your Arduino code for you.

All in all, this was a basic introductory workshop to IoT with Arduino.

You can download the slides [here](https://www.copy.com/s/t%3Ak9oyktMwsfuQc3Y2%3Bp%3A%252Farduino_iotcon2015_berlin.pdf%3Boid%3A615).

## Tuesday: Conference Day I

### The Tangible Mind

A *very* interesting talk about how technology has to become more *tangible*, how the *user experience* is really the *human experience* and has to include *all senses*.

The talk is hard to summarize in a short blog post, but you can read a longer blog post on the [speaker's blog](http://andreakrajewski.com/2015/08/30/the-tangible-mind/).

TL;DR: ***We*** *are the Internet of Things*.

### How do I tell it to my smartglasses?

How do we communicate with our smartglasses? Speech recognition is flexible and hands-free, but unreliable. Gestures are flexible and natural but hard to learn and to implement. Using a phone raises the question: Why have smartglasses at all?
Wearables are mostly still experimental and unreliable or clumsy.

The answer is to use several methods at once to increase reliability.

For example pointing with an arm that wears a smartwatch, looking in the same direction and speaking a command. Or detecting the proximity of another smart item and the user looking at it while making a simple gesture.

There is still a lot of research and development necessary and many technical hurdles to take in this area.

### Keynote: Connected 2020: The Intersection of Technology + Fashion

A short, *fluffy* talk about how designers are extremely important for wearables. To create wearables that people will want to wear, technologists and designers need to cooperate and communicate with each other.

Current examples of such cooperation are Misfit & Swarovski or OMSignal & Ralph Lauren.

### Privacy on the Internet of Things

A short talk about the current state of EU privacy laws and how they apply to the IoT.

While the IoT lives on gathering and analyzing data, we have to take great care what data we collect and whom we share it with.

Anonymized data can be saved without problems, but personally identifying data requires opt-in by the user. A decision to opt in must be made freely and be well informed *before* data collection starts. It shouldn't be hidden in the terms of service or vague.

"Medical" data, which also includes religious or sexual preference, IQ etc., is *especially protected*.

Transferring the data outside the EU is very problematic and has strict requirements.

When more than 9 persons handle data, the company needs a dedicated *data protection officer*.

### Hello iBeacon

A short introduction to iBeacons. iBeacons are small transmitters that use *Bluetooth Low Energy* (BLE) to broadcast a specific ID. These can be identified by apps installed on a smartphone. This is useful for e.g. tracking users or providing indoor navigation.

iBeacon uses no authentication and is easy to emulate with Android devices.

Google has launched a competing technology named Eddystone.

### Connecting Things via your Smartphone

This talk was about how we can connect *things* to the Internet without a dedicated connection. To do this, the things connect via BLE to passing smartphones that run a specific app, which forwards the thing's data to the Internet. This is particularly useful for retail spaces or similar, where employees (with smartphones in their pockets) roam.

### Smart Home: But where is the intelligence?

Most smart homes these days only act on direct user input. That's not very "smart".
Home life, especially for families, is too complex to model with simple rules or patterns. Automatically detecting the inhabitants' situations is unreliable and difficult to implement.

A solution would be situation-based rulesets – "recipes" – which the user activates either manually or automatically through proximity (via beacons or GPS), and which can be shared with other smart home inhabitants. Instead of using switches to activate recipes, a personality à la Siri or Jarvis could be implemented.

## Wednesday: Conference Day II

### Keynote: Design from Manufacturing

Mengmeng Chen of Seeed Studio talked about how her company helps designers and makers go from a garage prototype to manufacturing tens of thousands of units in China. Seeed Studio is based in Shenzhen in China but also has a branch in California, where they can get prototypes and small series done in a matter of days. They also provide experience and contacts in the world of Chinese manufacturing, which is complicated and hard to break into.

Seeed Studio also provides a development platform, including a Hardware Development Kit, an Open Parts Library, a Shared Supply Chain and short-turnaround Prototype Production.

### IoT Cloud Solutions: The why and how

The Internet of Things lives on data, and data needs to be stored and analyzed. Building a data platform from scratch means facing a lot of problems that other people have already solved. There is no need to reinvent the wheel, just focus on what you do best and choose partners for everything else. That is where cloud platforms come into play.

This talk compared several such platforms, for example [xively](https://xively.com/), [Evrythng](https://evrythng.com/), [PubNub](https://www.pubnub.com/) and [thethings.io](https://thethings.io/), in the categories *Understanding Business, Connect the Thing, Learn from Customers, Back-End, Apps, Tools, Interoperability* and *Support*.

### Electronics for Software developers

A very basic crash course that contrasted software development concepts with hardware development concepts. Basically a slightly modified "Arduino for beginners" talk.

### Keynote: MX3D Bridge project

MX3D is a company that researches 3D printing technologies. They design and sell organically designed furniture and as their new prestige project decided to print a *bridge* across a canal in Amsterdam with their new Multi-Axis Metal 3D Printer.

### Augmented Reality – State of the Union

An overview over the current state of AR.
The technology has found wide acceptance in *sales* and *advertisement* with static installations and the occasional smartphone or tablet app. Smartglasses are only in use for professional applications like maintenance work.

Apple has bought and closed down Metaio, the provider of the most advanced AR SDK, leaving Wikitude as the new leader in SDKs. Other platforms are either way behind or still in development.

The future of AR is looking very good, with improved smartglasses coming to market soon and new technologies like RGB+Depth information (RGB+D), light estimation, thermal touch and face recognition.

### Mobile Payment: The future of payment

While mobile payment is still in its infancy in Germany, other countries are making great leaps. The major players are Apple, Google and Samsung, and this competition creates a lot of innovation. Unlike earlier attempts, Google's new offering is independent of manufacturers and network operators.

Mobile payment has become much more secure with smartcard functionality being emulated on modern smartphones.

## Final thoughts

The location was well chosen for the few hundred attendees and the food was really good for the most part. I wish I could have eaten more than one of those *amazing* brownies 😉.

The keynotes were all surprisingly light on substance, but most of the other talks were quite interesting, sometimes even enlightening. The "Expo" on the other hand was tiny and completely focused on mobile development, catering only to the simultaneous MobileTech Conference.

All in all a decent conference, but considering the price I'm not likely to attend next year. I could probably have attended two equally well-hosted conferences for not much more – if any – money.

---

# How to monitor JAXRS/Jersey applications

*Originally published at https://synyx.de/blog/how-to-monitor-jaxrsjersey-applications/ (2015-07-29).*

If you visit a conference nowadays, you will still run into sessions about monitoring, or at least some aspects of it. ALM (application lifecycle management) is a really important discipline that a team or project should take into account right from the beginning – and no, this doesn't mean that you should trim or optimize prematurely, but that you should keep an eye on it. Next to the developers and operators we can identify many more stakeholders who are interested in the data, but generally they prefer a different view on it:

- Who is the audience, who uses the API in what version?
- How can I economize resources, but for specific cases only?
- How can I use the data to prevent accidents or control specific nodes?

The code can be found at [https://github.com/synyx/meter.git](https://github.com/synyx/meter.git) and this article shall, simply put, show the motivation behind it. So, in one sentence I would say:

> We want fine grained statistics about things that happen, without writing much integration code, and to partition the data at runtime using the provided input.

Enabling monitoring always requires us to follow the same pattern (do something before and optionally do something after), so it would be nice to simply not do the same things over and over again. Monitoring can be seen as a classical cross cutting concern, and even if we lose some control at implementation level, we profit from less maintenance effort and a better system design – which is a good trade in my opinion.

When I hear the words 'cross cutting concerns', AOP (aspect oriented programming) instantly comes to my mind, and those techniques shall pave the way as described in the sentence above.
These techniques can also be used independently of the underlying technology – of course we need some technology glue to wire the aspects, but this generally has to be done only once.

Many public APIs follow the REST pattern today, so we decided to go with Jersey first as it's a great framework for building enterprise REST services. Jersey uses HK2 internally and you can vary almost every part at runtime with a fluent Java API – if you know Google Guice then you get the idea. HK2 supports AOP through the libraries from the AOP Alliance, and you can hook into this process easily following the guidelines of the [aop-example](https://hk2.java.net/2.2.0/aop-example.html). That's pretty good news and a mighty joinpoint for Metrics, a quality library to gather runtime statistics.

Right now, our recipe contains Jersey, Metrics, AOP, and Java annotations as markup, and if we plug everything together we achieve something like this:

```java
@GET
@Metric (
        timers = @Timer,
        histograms = @Histogram (value = "#size", measure = BambooResponseSize.class),
        counters = @Counter (value = "{color}", kind = Kind.Error)
)
@Path ("{name}")
public String echo (@PathParam ("name") String name, @QueryParam ("locale") String locale, @DefaultValue ("Green") @QueryParam ("color") Color color) {
    validate (color);
    return service.call (name + "::" + locale);
}
```

One nice thing to mention is that everything managed by HK2 can be annotated and therefore measured – including resource methods and services from the DI container – fine grained control at method level :).
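To give an impression of the one-time glue code involved, here is a minimal sketch of how such interception could be wired: an HK2 `InterceptionService` that puts an AOP Alliance `MethodInterceptor` around every method carrying the `@Metric` annotation and measures it with a Dropwizard Metrics `Timer`. This is my own illustration, not the code of the meter project, and the class and metric names are assumptions:

```java
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.List;

import org.aopalliance.intercept.ConstructorInterceptor;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.glassfish.hk2.api.Filter;
import org.glassfish.hk2.api.InterceptionService;
import org.glassfish.hk2.utilities.BuilderHelper;

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;

/**
 * Sketch: intercepts every HK2-managed method annotated with @Metric
 * and records its execution time in a Dropwizard Metrics Timer.
 */
public class MetricInterceptionService implements InterceptionService {

    private final MetricRegistry registry = new MetricRegistry();

    @Override
    public Filter getDescriptorFilter() {
        // consider all services; the method filter below does the real selection
        return BuilderHelper.allFilter();
    }

    @Override
    public List<MethodInterceptor> getMethodInterceptors(Method method) {
        if (method.isAnnotationPresent(Metric.class)) {
            return Collections.<MethodInterceptor>singletonList(new TimingInterceptor(registry));
        }
        return null; // no interception for unannotated methods
    }

    @Override
    public List<ConstructorInterceptor> getConstructorInterceptors(Constructor<?> constructor) {
        return null;
    }

    private static class TimingInterceptor implements MethodInterceptor {

        private final MetricRegistry registry;

        TimingInterceptor(MetricRegistry registry) {
            this.registry = registry;
        }

        @Override
        public Object invoke(MethodInvocation invocation) throws Throwable {
            // one timer per intercepted method, named after class and method
            Timer timer = registry.timer(MetricRegistry.name(
                    invocation.getMethod().getDeclaringClass(), invocation.getMethod().getName()));
            Timer.Context context = timer.time();
            try {
                return invocation.proceed();
            } finally {
                context.stop();
            }
        }
    }
}
```

The interception service itself would still have to be registered with HK2 (e.g. via an `AbstractBinder`), which is exactly the kind of glue that only has to be written once.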
So what do I mean with *a partition at runtime*, then?

You can find some of the JAXRS annotations in the previous example, e.g. `PathParam` and `QueryParam`. Of course Jersey knows how to interpret these parameters – but do we know as well? Yes, and that's pretty awesome, as it allows us to

- build metrics which are partitioned by customer levels: …, silver, gold, platinum
- build metrics for versioned APIs: /v1/, …, /v9/ – is anyone still using v1, and at what cost?
- build metrics to track clients by geography, cookie, header
- build metrics that measure errors only
- build metrics for if-you-can-name-it-you-can-measure-it things.

*A conversion takes place prior to that, so you can run every custom evaluation beforehand: see the [userguide](https://jersey.java.net/documentation/latest/user-guide.html#d0e2152).*

And that's what I really like the most 🙂 – you have access to runtime values from ALL services and resources managed by Jersey/HK2 to configure the **use case** you want.

If you would like to contribute, to work on items mentioned on the roadmap, or to file an issue for something that is not on the roadmap 🙂, then feel free to visit us at [https://github.com/synyx/meter.git](https://github.com/synyx/meter.git) – or just try out the example project to get a first impression.

Feedback is highly welcome, and many thanks from me to the developers of Jersey/HK2 and Metrics for their great work – a nice piece of software.

---

# Devoxx Poland 2015 Summary

*Originally published at https://synyx.de/blog/devoxx-poland-2015-summary/ (2015-07-02).*

So that's it.
Three days, 2,000 developers from 20 countries, over 140 speakers from around the world, and one outstandingly beautiful city. It was the first time that [Devoxx Poland](http://devoxx.pl) (previously known as [33rd Degree](http://2014.33degree.org)), one of the most recognizable European Java conferences, took place in Krakow, the city of the Polish kings and one of the most important places in Polish history. It ran from Monday to Wednesday last week in the [ICE Conference Center](http://www.icekrakow.pl/), which is located directly by the Vistula river, with a beautiful view over the Wawel Royal Castle.

![Inside the ICE Conference Center](https://media.synyx.de/uploads//2015/06/IMG_20150624_110556.jpg)

Now it's time for a short summary. Below is my personal list of conclusions about the current state of the Java industry and where it is heading, based on what I heard and saw during the conference.

**1. The rise of Microservices**

You might say – "yep, of course". It is obvious to everyone who follows the Java & web ecosystem that microservices are the hottest and fanciest "new" "technology" of the year 2015. I counted around 10 talks (out of around 100 full-time presentations) that were completely dedicated to them. Another couple or so discussed tools that emerged to make them easier to monitor, deploy and deal with. And nearly every other talk mentioned them.

I think we are in the peak phase. I remember seeing the first [talk](http://2013.geecon.org/speakers/sam-newman.html) about it two years ago at **GeeCON 2013**. Now it explodes, and at Devoxx you could hear everything about it: from theory and principles, through architecture, best practices, tools supporting it, monitoring and devops, to live coding demos. Most of these talks were very enthusiastic about it, although in the next few months I expect some more skeptical or at least balanced talks. There was actually [one talk](https://web.archive.org/web/20150503201315/http://cfp.devoxx.pl:80/2015/talk/MZA-9564/Modularity_in_post_microservice_world) like this.

![Microservices Live-Coding Demo](https://media.synyx.de/uploads//2015/06/IMG_20150623_154822.jpg)

**2. Reactive and Resilient by default**

The second most important topic at Devoxx was resiliency and reactive programming, together with durability, asynchronous programming, circuit breaking and back pressure – all of which are strongly connected with microservices.

The conclusion from the talks I have seen is quite obvious: if something can crash, it eventually will, most probably in the worst moment. That is why resiliency and recovery mechanisms are so important and should not only be a feature, but a must, especially in the era of microservices.

The application (or actually the whole container or server) should be able to crash at any time and a supervisor should take care of it. The other parts of the system should be able to continue their operation almost as if nothing had happened, circuit-breaking the failed system and using fall-back mechanisms that provide some simplified data from other sources. And as soon as the failed part of the system has recovered, everything should automatically go back to normal.
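None of the talks' code is preserved here, but the pattern described above is easy to sketch. A minimal, illustrative circuit breaker with a fallback, using Netflix Hystrix as a typical 2015 choice – this is my own example, not taken from any Devoxx talk, and the service and class names are assumptions:

```java
import com.netflix.hystrix.HystrixCommand;
import com.netflix.hystrix.HystrixCommandGroupKey;

/**
 * Sketch: wraps a call to a remote recommendation service.
 * If the call fails or times out, Hystrix opens the circuit and
 * the fallback returns simplified data instead of an error.
 */
public class RecommendationCommand extends HystrixCommand<String> {

    private final String customerId;

    public RecommendationCommand(String customerId) {
        super(HystrixCommandGroupKey.Factory.asKey("RecommendationService"));
        this.customerId = customerId;
    }

    @Override
    protected String run() throws Exception {
        // call the (possibly crashing) remote service here
        return remoteRecommendationsFor(customerId);
    }

    @Override
    protected String getFallback() {
        // degraded but working answer while the remote service is down
        return "top-sellers";
    }

    private String remoteRecommendationsFor(String customerId) throws Exception {
        throw new Exception("remote service unavailable"); // placeholder for a real HTTP call
    }
}
```

Calling `new RecommendationCommand("42").execute()` then either returns real recommendations or, once the circuit is open, the fallback – which is exactly the "continue almost as if nothing had happened" behaviour described above.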
![Main Room](https://media.synyx.de/uploads//2015/06/PANO_20150622_155838.jpg)

**3. Functional Programming breaks (slowly) through**

The third most popular and still very hot topic was functional programming (or rather, a sort of FP). Thanks to lambda expressions, the Streams API, CompletableFuture, (semi-)closures and tools like RxJava, programmers slowly adopt more and more functional concepts and start to think of computation as pipeline processing of immutable data instead of thinking of it as mutating object state.

Furthermore, more and more developers find it increasingly important to write code in such a way that there is no mutable state, including code at the method level. I guess this is also a side effect of using IoC containers, where many objects are simply global singletons and introducing state in them would cause serious performance problems and bugs.

![View over Wawel Castle from ICE Center](https://media.synyx.de/uploads//2015/06/PANO_20150624_174232.jpg)

**4. Java 8 throttles the rise of the new languages**

Two or three years ago, if you attended a Java conference, one of the most important things discussed was "Is Scala/Groovy/xyz the next Java?" or "What are the Java alternatives?". You won't hear that anymore. In some cases you would hear that Scala/Groovy/xyz is better for implementing certain kinds of things, like mathematical computations or data pipeline processing. Or that some languages are better at concurrency and parallelism. Or that some languages are better for writing tests.

But there is no longer any talk of Java soon being replaced by other languages, because with Java 8 and many (especially "reactive") mature libraries, Java is simply good enough for most cases. At least for now.

**5. The Java community focuses on the backend**

Again, two or three years ago there were many talks about Mobile, Android, Web, NodeJS frameworks, new JavaScript client-side frameworks, and the so-called "Full Stack Developer". In past years at a Java conference you could attend many diverse talks – from Android programming, through NodeJS and JavaScript backend solutions and tools, to all the JavaScript frontend stuff like AngularJS, EmberJS etc. This is not the case anymore.

They have all moved out to their own separate conferences, because no one believes anymore that anyone can be a good Java (backend) developer, a good JS frontend developer and a mobile developer at once.
There are simply too many problems to solve on the "backend" side, so one has to focus on Java code and (probably before long, more and more) on "data".

**6. Big Data and NoSQL are here**

Big Data and NoSQL were of course present, but it is not the same "Big Data" and the same NoSQL it was for the past 3-4 years. Today we are not talking about what it is, what the theory is, etc. Today it is obvious to everyone that there is no such thing as "Big Data" and "NoSQL" databases as a category of their own.

There are only databases which scale better and handle some particular amount and type of data better in particular circumstances. That's it, and the only thing we should do is learn how to recognize which data fits better into database X or Y, and how to use it properly. And the conference showed exactly this: there are no SQL and NoSQL databases, only databases that handle data of a given characteristic better.

There are simply databases which do one thing better than another, for example handle transactions and relations between data better, or scale very well but support no relations, or are designed to store and query text documents or graphs of objects.

**7. Spring is all you need**

It was probably the first time I didn't see any talk about alternative approaches to Spring-based technologies (Java EE excluded).

This year there was nothing about any new or old frameworks. No Play Framework, no Dropwizard. No other smaller fancy technologies like Ratpack or Spark Framework. Nothing. Zero. There have always been at least a couple of talks about different frameworks and alternative approaches. Not this time. There wasn't even Grails.

The same applies to the data-persistence solutions, ORMs for instance.

No new features in Hibernate or JPA. Nothing about jOOQ or myBatis/iBatis. Now it's all about Spring Data: there is Spring Data JPA, Spring Data MongoDB, Spring Data Neo4J, Cassandra, Redis, Elasticsearch and more. There are also Spring solutions for Big Data – Spring XD – and for the microservices aka cloud stuff – Spring Cloud.

Spring seems to be the only thing you need. Thus I'm waiting for "Spring Developer" job titles instead of "Java Developer", just like "SharePoint Developer" or "Liferay Developer" already exist.

Despite all the new buzzwords and "new" technologies, there was no technological revolution, not even close. Maybe microservices will revolutionize the way we design systems, but they will do it not because they are a revolutionary technology, but rather by combining many other technologies instead of introducing something really new.

I think it might be true that the Java industry, and the IT world in general, is at an [inflection point](https://vimeo.com/130981099#t=2m56s).
New technologies aren't a game-changer, and every new technology needs more and more time to spread across the industry, so it feels a little bit stagnant.

I have the feeling that we now have a little time to catch our breath, right after the Cloud, Mobile, Big Data, Asynchronous, Functional and DevOps era, and to prepare ourselves for the Next Big Thing, which is probably waiting for us around the corner and will pop up at the least expected moment.

---

# Let's add some value (part2)

*Originally published at https://synyx.de/blog/lets-add-some-value-part2/ (2015-06-03).*

In the [first part](http://blog.synyx.de/2014/11/lets-add-some-value/) of my postings I talked about the disadvantages of breaking epics down into technical stories and why it is preferable to create real user stories which enable us to deliver real value in each iteration.

In this part I'll present objections that I have come across while talking with developers and product owners (POs) about "creating value in each iteration".

**#1 "We waste time and money by developing things which we do not need anymore once we finish the envisioned feature."**

Sometimes we have an ideal scenario where we want to develop a big feature which can be broken down into smaller features that each generate real value and also naturally bring us closer towards our goal. But sometimes the only way to generate value early is by choosing a sort of alternative path which does not directly work towards our envisioned goal.

Imagine a typical project where our goal is to make data which is currently managed via Excel available for customers in an online tool. But since the data is very complex it is not that simple to implement a good solution for the data administration. So the team offers the idea to implement a CSV import as a fast possibility to get the data into the system. The PO however rejects the idea, reasoning that the import functionality will not be needed anymore once the data administration is fully implemented.

As you may guess I did not make this scenario up. The PO I talked to in this situation just saw the extra work which did not bring the project significantly closer to his original goal. My main point was that there was a chance that we would not even need the data administration once we had the CSV import, since the users could go on managing their data with their accustomed tools. But as it became clear that this was not an option, I argued that the advantage of having a usable product very early would outweigh the extra cost of building the import feature.

The end of the story: up till today the data administration still does not cover all relevant cases and many parts of the data are therefore still managed both in the new online tool and in the old Excel sheets. This might of course be a sign that the import feature would also never have covered all the special cases, but it would have been usable much earlier.
The users waited over half a year for a usable feature because building the data administration took that long.

The lessons we should learn:

- Having a usable product early is worth some extra effort.
- An interim solution may prove to become a viable long-term solution.

**#2 "By starting with a small and simple implementation to solve problems early we can not get to a sophisticated final solution."**

Of course it is challenging for a team to build software which scales from a small solution to a large complex system which is still consistent. Therefore the big vision presented by the PO and future requirements which are already known should always be considered when making technical decisions. But if we are always worried about developing the perfect system we must be careful not to produce an overengineered solution which does more than we really need. Features which do not generate value are waste (see [lean manufacturing](http://en.wikipedia.org/wiki/Lean_manufacturing)), and while high code quality generates value by ensuring expandability and maintainability, no one needs artificially bloated solutions.

Also, adding new requirements to a "complete" product which was specifically designed to solve one epic story can lead to more problems than adding requirements to software which was developed step by step from the start.

I think the fundamental misconception behind this objection is the notion of software being eventually finished. Because if we have a "final solution" in mind we automatically think that we can define all its requirements and, based on that, a perfect plan. If such a perfect plan is possible for your complex problem: do not use an iterative approach!

**#3 "Only the complete feature will be adopted because partial solutions do not add enough value compared to existing solutions"**

This may of course be a valid argument from the PO, especially from a marketing perspective. Often enough I saw projects where a technical approach was chosen for a feature because "we can not deliver parts of it anyway". If our product can not go live after an iteration we can only generate virtual value, for example by providing a usable version for a small group of test users (beta version). Early feedback from real users should be valued highly!

Despite everything we should always be able to potentially go live, because the situation may change or we may even find a partial solution which would add value for at least some users. But this is only possible if we provide real features after each iteration instead of technical parts which only function as prerequisites for real features in the future.

**#4 "My problem can not be broken down because it is just too complex"**

This is the last objection I want to talk about.
It is the one I hear the most and therefore it is also the one which inspired me to write this whole thing.

I admit it… sometimes it may just not be possible to build something that can be used by someone to solve a real problem after one iteration.

But my point is that most of the time we come to this conclusion much too fast, because it is easy. By telling ourselves that the thing we are building is just too complex, we make it easier for ourselves to fail. We do not have to commit to deliver anything because we already convinced our stakeholders that it is not possible. If the whole project takes longer than expected: "Well, we already told you that the task is very complex". Agile was invented to tackle complex problems! Great people all around the world use agile methods to solve things which are likely much more complex than the problem we are working on right now. But if we are not able to deliver value, we are not agile.

If we want to prove that agile frameworks really do offer a great benefit for our business we should always strive for the maximum satisfaction of our stakeholders.

Let us surprise our customers by delivering working software instead of explaining why they have to wait some more time for it.

**Finding ways to generate real value early may be hard, but it is worth the effort.**

---

# Entwicklertag Karlsruhe 2015

The [Karlsruher Entwicklertage](http://entwicklertag.de/karlsruhe/2015/) had a birthday! They turned ten years old, and we synyx folks had to go and see that for ourselves. We were looking forward to high-quality talks and to meeting many Karlsruhe colleagues and familiar faces of the national developer scene again. And we were not disappointed! This time there were six parallel tracks, so nobody missed out.
Whether DevOps engineer, coder, Scrum guru, security fanatic or buzzword juggler – everyone got their money's worth. The thematic split into a Conference Day and an Agile Day matched the different preferences, even though the Agile Day only partly covered "agile" topics, presumably for lack of speakers in that area. Below we go into a few of the more successful talks of the two days.

## Conference Day

### Spock und Geb: Übersichtlich und nachvollziehbar testen für alle!

Anyone who has ever written web integration tests with Selenium knows that, besides its many advantages, it also has its problems – among other things regarding test case definition, maintainability and reporting.

[Tobias Kraft](https://twitter.com/tokraft) and [Ralf Müller](https://twitter.com/RalfDMueller) presented an [interesting alternative](http://entwicklertag.de/karlsruhe/2015/spock-und-geb-bersichtlich-und-nachvollziehbar-testen-f-r-alle) which, with some initial effort, makes it possible for the product owner (PO) to generate test cases without any technical knowledge. The developers then implement them with a few lines of Groovy code, and the tests deliver presentable, meaningful reports back to the PO.

The stack they use consists of the Groovy test tool [Spock](http://www.next-gamer.de/wp-content/uploads/2015/02/mr-spock.jpg), the web testing framework [Geb](https://web.archive.org/web/20170227233745/http://www.gebish.org/testing), a selection of reporting tools and – surprise! – Excel. After all, the business side should also get a tool they feel comfortable with.

![](https://media.synyx.de/uploads//2015/05/excel_spez.png)

### Java Web-Security Anti-Patterns

[Dominik Schadow's](https://twitter.com/dschadow) [talk on web security](http://entwicklertag.de/karlsruhe/2015/java-web-security-anti) was not for the faint of heart: whoever walked in calm and with a clear conscience may well have come out biting their nails and with chattering teeth. The session was like a good horror movie, and your own project was the monster! Dominik projected vivid examples that clearly showed how NOT to do security on the web, and more than one attendee surely recognised their own handiwork in them. Is there a threat model? Is authentication slow enough to prevent brute force? Is the session ID renewed at the right time? Whoever could answer these and many more questions with "yes" was one of the lucky few who could leave the talk with a good feeling.
Die Session war augenöffnend und einer der wertvollsten Beiträge zur Konferenz.",[18,18797,18798],{},[2223,18799],{"alt":48,"src":18800},"https://media.synyx.de/uploads//2015/05/password_scream.png",[2207,18802,18804],{"id":18803},"agile-day","Agile Day",[649,18806,18808],{"id":18807},"software-die-jeder-mag-schnell-und-innovativ","Software , die jeder mag – schnell und innovativ",[18,18810,18811,18812,18817],{},"Jürgen Lind und ",[585,18813,18816],{"href":18814,"rel":18815},"https://twitter.com/bdam",[589],"Adam Egger"," stellten ein Konzept vor, welches sich von der\nSoftwareentwicklung mit drei Phasen – Anforderung, Entwicklung und Testen – deutlich unterscheidet. Ihr Konzept sieht\neine starke Kollaboration aller beteiligten Gruppen vor und ist in vier Phasen aufgeteilt.",[18,18819,18820],{},"In der ersten Phase sollen der Nutzer und dessen Bedürfnisse sowie die Domäne verstanden werden, sodass die Sicht des\nKunden eingenommen werden kann. Die Probleme des Kunden zu analysieren und definieren ist Teil der folgenden\nanalytischen Phase. Unterstützt werden diese Phasen von verschiedenen Methodiken wie z.B. ‘5-Why’. Diese Methode zielt\ndarauf ab den Kern des Problems zu finden, indem man immer wieder die ‘Warum?’ Frage stellt, wodurch das eigentliche\nProblem zum Vorschein gebracht werden soll.",[18,18822,18823],{},"Nachdem die Probleme identifiziert wurden, wird ein Lösungsentwurf angestrebt. Dabei helfen die ‘Crazy 8s’. Bei dieser\nMethode faltet man ein Blatt Papier dreimal, sodass acht Rechtecke entstehen. Diese Rechtecke werden mit acht\nverschiedenen Entwürfen gefüllt, wobei potentielle Lösungsentwürfe entstehen.",[18,18825,18826,18827,18832],{},"Die vierte Phase schließt das Modell mit einem Prototyp ab. Dieser kann auf verschiedenen Wegen entstehen – zum Beispiel\nals Skribble auf einem Blatt Papier. Softwarelösungen wie ’Prototyper’, ",[585,18828,18831],{"href":18829,"rel":18830},"https://popapp.in",[589],"‘POP’"," und ähnliches sind\nAlternativen.",[18,18834,18835],{},[2223,18836],{"alt":18837,"src":18838},"jukebox","https://media.synyx.de/uploads//2015/05/phases.png",[649,18840,18842],{"id":18841},"funktionale-programmierung","Funktionale Programmierung",[18,18844,18845,18846,18851,18852,18857,18858,18863,18864,18869],{},"Gleich zwei Talks befassten sich mit dem Thema funktionale\nProgrammierung. ",[585,18847,18850],{"href":18848,"rel":18849},"https://twitter.com/NicoleRauch",[589],"Nicole Rauch"," bot\neinen ",[585,18853,18856],{"href":18854,"rel":18855},"http://entwicklertag.de/karlsruhe/2015/jetzt-funkts-funktionale",[589],"gelungenen Einstieg"," in die Thematik indem sie\ntypische Sprachkonstrukte verschiedener funktionaler Sprachen mit den objektorientierten Pedants verglich. Dabei wurde\nschnell klar, welche Vorzüge Sprachen wie Haskell und Scala gegenüber großen Playern wie Java haben können. Auf diese\nVorzüge ging ",[585,18859,18862],{"href":18860,"rel":18861},"https://twitter.com/sperbsen",[589],"Michael Sperber"," anhand\neines ",[585,18865,18868],{"href":18866,"rel":18867},"http://entwicklertag.de/karlsruhe/2015/funktionale",[589],"anschaulichen Beispiels"," ausführlicher ein. 
Er erklärte,\nwarum grundlegende funktionale Konzepte wie Immutability eine tolle Sache sind und warum wir viel mehr Software mit\nfunktionalen statt objektorientierten Sprachen entwickeln sollten.",[2207,18871,18873],{"id":18872},"was-gabs-noch","Was gabs noch?",[18,18875,18876],{},"Eine Menge Talks, die richtig gut waren aber für die wir in so einem kleinen Blogeintrag nicht genug Platz haben:",[577,18878,18879,18888,18902,18911,18925,18934],{},[580,18880,18881,18882,18887],{},"Hardy Ferentschik, der wunderbar\ndetailliert ",[585,18883,18886],{"href":18884,"rel":18885},"http://entwicklertag.de/karlsruhe/2015/jmh-micro-benchmarking",[589],"beschreiben konnte",", wie man mit\nMicro-Benchmarking auf der JVM prüft ob Code A oder Code B schneller ist.",[580,18889,18890,18895,18896,18901],{},[585,18891,18894],{"href":18892,"rel":18893},"https://twitter.com/hschwentner",[589],"Henning Schwentner",", der mit\neiner ",[585,18897,18900],{"href":18898,"rel":18899},"http://entwicklertag.de/karlsruhe/2015/value-objects-next-big",[589],"Demonstration der Möglichkeiten von Value Types","\neinen Blick in die Zukunft Javas warf",[580,18903,18904,18905,18910],{},"Dr. Mana Taghdiri, die in hohem Tempo\nverschiedene ",[585,18906,18909],{"href":18907,"rel":18908},"http://entwicklertag.de/karlsruhe/2015/methodologies-and-tools",[589],"Tools und Methoden"," zur automatischen\nTestgenerierung beleuchtete",[580,18912,18913,18918,18919,18924],{},[585,18914,18917],{"href":18915,"rel":18916},"https://twitter.com/tim_roes",[589],"Tim Roes",", der einen\nschnellen ",[585,18920,18923],{"href":18921,"rel":18922},"http://entwicklertag.de/karlsruhe/2015/web-today-and-tomorrow",[589],"Überblick"," über vergangene, aktuelle und\nzukünftige Webtechnologien verschaffte und diese mit Codebeispielen veranschaulichte",[580,18926,18927,18928,18933],{},"Gebhard Ebeling und Mario Krahmer, die versuchten\ndie ",[585,18929,18932],{"href":18930,"rel":18931},"http://entwicklertag.de/karlsruhe/2015/mut-zur-l-cke-testl-cken",[589],"Waage zwischen absoluter Sicherheit und kalkuliertem Risiko","\nbei der Testabdeckung zu finden",[580,18935,18936,18937,18942,18943,18948],{},"Prof. Dr. Jörn Müller-Quade vom ",[585,18938,18941],{"href":18939,"rel":18940},"http://www.kit.edu/index.php",[589],"KIT",",\nder ",[585,18944,18947],{"href":18945,"rel":18946},"https://entwicklertag.de/karlsruhe/2015/beweisbare-sicherheit-von.html",[589],"Beweisbare Sicherheit von der Verschlüsselung bis hin zum Softwareschutz","\nvorstellte. Dabei zeigte er wie man Sicherheit anhand von Modellen und Annahmen beweisen und somit ein Verständnis für\nSicherheit entwickeln kann.",[2207,18950,969],{"id":968},[18,18952,18953,18954,986],{},"Der Besuch hat sich gelohnt, wir kommen gerne wieder! Die Talks waren durch die Bank von ordentlicher bis hervorragender\nQualität und die parallelen Tracks mit großer Bandbreite an Themen ließen keine Wünsche offen. Wir konnten deutlich mehr\nnützliche Infos in unseren Arbeitsalltag mitnehmen als bei der noch nicht so\netablierten ",[585,18955,18958],{"href":18956,"rel":18957},"http://blog.synyx.de/2015/02/entwicklertag-frankfurt-2015/",[589],"Schwesterkonferenz in Frankfurt",[18,18960,18961],{},"Die Keynotes des Conference Day gingen für unseren Geschmack zu sehr in Richtung Buzzword-Bingo, was aber Holger\nKoschek und Rolf Drähter am Agile Day durch ihre Sangeskraft locker wieder ausbügelten.",[18,18963,18964,18965,18970],{},"Auch am Umfeld mit Catering, Kaffee, Ganggesprächen, Zeitplanung und Organisation gab es nix zu meckern. 
Vielen Dank an\nden Ausrichter ",[585,18966,18969],{"href":18967,"rel":18968},"https://www.andrena.de/",[589],"andrena",", der sich zu seinem zwanzigjährigen Bestehen eine würdige\nGeburtstagskonferenz modelliert hat!",[18,18972,18973],{},[2223,18974],{"alt":18837,"src":18975},"https://media.synyx.de/uploads//2015/05/jukebox.jpg",{"title":48,"searchDepth":86,"depth":86,"links":18977},[18978,18982,18986,18987],{"id":18731,"depth":86,"text":18732,"children":18979},[18980,18981],{"id":18735,"depth":126,"text":18736},{"id":18781,"depth":126,"text":18782},{"id":18803,"depth":86,"text":18804,"children":18983},[18984,18985],{"id":18807,"depth":126,"text":18808},{"id":18841,"depth":126,"text":18842},{"id":18872,"depth":86,"text":18873},{"id":968,"depth":86,"text":969},[613],"2015-05-29T14:01:30","Die Karlsruher Entwicklertage hatten Geburtstag! Zehn Jahre alt wurden sie\\nund das mussten wir synyxler uns anschauen. Wir freuten uns auf hochwertige Talks und das Wiedersehen mit vielen\\nKarlsruher Kollegen und bekannten Gesichtern der nationalen Entwicklerszene. Und wir wurden nicht enttäuscht! Es gab\\ndiesmal sechs parallele Tracks so dass niemand zu kurz kam. Egal ob DevOp, Coder, Scrum-Guru, Sicherheitsfanatiker oder\\nBuzzword-Jongleur – jeder kam auf seine Kosten. Die thematische Aufteilung in Conference Day und Agile Day kam den\\njeweiligen Präferenzen entgegen, auch wenn der Agile Day tatsächlich nur teilweise das Thema “Agile” bediente,\\nvermutlich aus Mangel an Speakern in diesem Bereich. Im Folgenden gehen wir auf ein paar gelungene Talks der beiden Tage\\nein.","https://synyx.de/blog/entwicklertag-karlsruhe-2015/",{},"/blog/entwicklertag-karlsruhe-2015",{"title":18714,"description":18995},"Die Karlsruher Entwicklertage hatten Geburtstag! Zehn Jahre alt wurden sie\nund das mussten wir synyxler uns anschauen. Wir freuten uns auf hochwertige Talks und das Wiedersehen mit vielen\nKarlsruher Kollegen und bekannten Gesichtern der nationalen Entwicklerszene. Und wir wurden nicht enttäuscht! Es gab\ndiesmal sechs parallele Tracks so dass niemand zu kurz kam. Egal ob DevOp, Coder, Scrum-Guru, Sicherheitsfanatiker oder\nBuzzword-Jongleur – jeder kam auf seine Kosten. Die thematische Aufteilung in Conference Day und Agile Day kam den\njeweiligen Präferenzen entgegen, auch wenn der Agile Day tatsächlich nur teilweise das Thema “Agile” bediente,\nvermutlich aus Mangel an Speakern in diesem Bereich. Im Folgenden gehen wir auf ein paar gelungene Talks der beiden Tage\nein.","blog/entwicklertag-karlsruhe-2015",[],"Die Karlsruher Entwicklertage hatten Geburtstag! Zehn Jahre alt wurden sie und das mussten wir synyxler uns anschauen. Wir freuten uns auf hochwertige Talks und das Wiedersehen mit vielen Karlsruher Kollegen…","QIfpLDM6lljLsBBxaU9AHA1jAqQItGOaTTn3Fhg8yjE",{"id":19001,"title":19002,"author":19003,"body":19004,"category":19116,"date":19117,"description":48,"extension":617,"link":19118,"meta":19119,"navigation":499,"path":19120,"seo":19121,"slug":19008,"stem":19122,"tags":19123,"teaser":19124,"__hash__":19125},"blog/blog/schulesynyx-the-self-training-company.md","schule@synyx – the self-training company",[7799],{"type":11,"value":19005,"toc":19112},[19006,19009,19013,19016,19019,19026,19030,19037,19040,19043,19046,19051,19068,19073,19090,19095,19109],[14,19007,19002],{"id":19008},"schulesynyx-the-self-training-company",[2207,19010,19012],{"id":19011},"the-training-issue","The training issue",[18,19014,19015],{},"One issue that every company has to deal with is the training of its employees. 
I encountered different attitudes\nregarding this subject in different companies and wondered what is a healthy approach for a software project company to\npursue.",[18,19017,19018],{},"In software development it is not sufficient to offer one workshop per year and call it a “training program”. It is also\nnot enough to have one or two R&D dudes per 50 developers that keep ahead of new technologies and tell the worker\ndrones from time to time what to use for their projects. The result of half-assed concepts like this will be that\nmotivated, willing-to-learn developers look for other jobs and you will eventually remain with a bunch of static\nby-the-book workers who have no interest in learning. Relying solely on this can lead a company into deprecation\nwithin few years.",[18,19020,19021,19022,19025],{},"A more healthy approach is to maintain a constant mindset of learning and innovation and you have to pull ",[573,19023,19024],{},"all"," of your\ndevelopers into it. The progress should come from them and out of their own motivation, it must not be dictated by\nmanagement. Sadly that is a very rare condition. It requires not only an open-minded management but also employees that\nare eager to learn new things and are curious about new ways of solving problems – traits that every good developer\nshould have in this fast-moving industry.",[2207,19027,19029],{"id":19028},"the-synyx-approach","The synyx approach",[18,19031,19032,19033,19036],{},"Here at synyx we have pretty good prerequisites to maintain this condition. Our bosses encourage us to spend work time\non our own education and the general mentality among the employees is that learning new stuff is cool. Among other\nthings we have one important tool here at synyx to keep this mindset alive. It is called “",[27,19034,19035],{},"schule@synyx","” and is\nessentially an employee self-training program, that works like this:",[18,19038,19039],{},"Every Friday afternoon there is a reserved time slot of 1-2 hours in our largest meeting room. One employee voluntarily\ngives a talk about a specific work-related topic, that he chooses himself, and all other employees are free to attend.\nThe talk can have tutorial-, workshop- or just informational character. The subjects spread out on a wide variety of\nsoftware development related fields. Examples from the recent time are talks about systemd, JavaScript linting, system\narchitecture, Puppet, Android development, NoSQL modeling, UX, Spring security, Docker. These talks touch the topics\noperations, application development, GUI design, software quality, mobile and persistence from different angles on\ndifferent expert levels. During and after the talk discussions arise about different aspects of the subject, that are\nsometimes continued into after hours beer time.",[18,19041,19042],{},"Neither the speaker nor the attendees have to spend money or their free time on this – the preparation and attendance\nbelong to the 20% part of synyx’s 80/20 work time model.",[18,19044,19045],{},"I can not emphasize enough how valuable this is to us! 
Just think about how everyone benefits from it:",[18,19047,19048],{},[27,19049,19050],{},"The company benefits from …",[577,19052,19053,19056,19059,19062,19065],{},[580,19054,19055],{},"… expert knowledge spread throughout the company",[580,19057,19058],{},"… new methods and technologies introduced into the company’s knowledge pool",[580,19060,19061],{},"… maintaining a high level of knowledge diversity among the developers",[580,19063,19064],{},"… a huge motivation boost among the employees",[580,19066,19067],{},"… a growing and up-to-date pool of potentially quality talks for conferences, customers and external educational\npurposes like universities or user groups",[18,19069,19070],{},[27,19071,19072],{},"The employees …",[577,19074,19075,19078,19081,19084,19087],{},[580,19076,19077],{},"… as project developers have to deal with all of those fields anyway and gain a wider knowledge portfolio just by\nattending the talks",[580,19079,19080],{},"… get to learn cool new things every week presented in a convenient way",[580,19082,19083],{},"… get a chance to self-dependently shape their training and education",[580,19085,19086],{},"… can exchange different opinions on the subjects in the subsequent discussions",[580,19088,19089],{},"… have no pressure or obligation to deliver something. No boss will ever order employee X to do a talk about Y on day\nZ",[18,19091,19092],{},[27,19093,19094],{},"The speaker (who imo is the major beneficiary) …",[577,19096,19097,19100,19103,19106],{},[580,19098,19099],{},"… gains speaking experience in front of a small, familiar crowd",[580,19101,19102],{},"… gets the chance to aquire deep knowledge in one subject that he is interested in",[580,19104,19105],{},"… receives feedback and discussion input about his subject from the attendees",[580,19107,19108],{},"… introduces himself as know-how holder, maybe even expert on the subject",[18,19110,19111],{},"I would call that a solid win-win-win situation. And it greatly helps to establish the vital learning and innovation\nmindset mentioned in the introduction. Incomprehensibly self-dependent training elements like this seem to be the\nexception in the industry, so I consider us synyx employees pretty lucky about our situation 🙂",{"title":48,"searchDepth":86,"depth":86,"links":19113},[19114,19115],{"id":19011,"depth":86,"text":19012},{"id":19028,"depth":86,"text":19029},[613],"2015-05-04T13:33:56","https://synyx.de/blog/schulesynyx-the-self-training-company/",{},"/blog/schulesynyx-the-self-training-company",{"title":19002,"description":48},"blog/schulesynyx-the-self-training-company",[],"The training issue One issue that every company has to deal with is the training of its employees. 
I encountered different attitudes regarding this subject in different companies and wondered…","1M3fqYlcQkWaInHIzGwS1CCyMX64l6YdfMAavnoUoPM",{"id":19127,"title":19128,"author":19129,"body":19130,"category":19375,"date":19376,"description":19377,"extension":617,"link":19378,"meta":19379,"navigation":499,"path":19380,"seo":19381,"slug":19134,"stem":19383,"tags":19384,"teaser":19387,"__hash__":19388},"blog/blog/entwicklertag-frankfurt-2015.md","Entwicklertag Frankfurt 2015",[7799,13652],{"type":11,"value":19131,"toc":19368},[19132,19135,19178,19182,19196,19202,19206,19214,19223,19238,19247,19251,19259,19265,19268,19271,19277,19286,19289,19293,19335,19341,19343,19346,19354,19363],[14,19133,19128],{"id":19134},"entwicklertag-frankfurt-2015",[18,19136,19137,19138,19143,19144,19149,19150,19155,19156,19159,19160,19165,19166,19171,19172,19177],{},"Während der ",[585,19139,19142],{"href":19140,"rel":19141},"http://www.entwicklertag.de/",[589],"Karlsruher Entwicklertag"," der ",[585,19145,19148],{"href":19146,"rel":19147},"http://www.andrena.de/",[589],"andrena objects ag","\nschon seit 2010 ein etabliertes Event in Karlsruhe ist, brachte der Veranstalter die Konferenz dieses Jahr erst zum\nzweiten Mal nach Frankfurt. Fünf synyx-Kollegen dachten sich: “Die nehmen wir doch mit!” und bestiegen den ICE in\nRichtung Deutschlands Bankenmetropole. Die Entwicklergemeinde wurde dort in einem Gebäude der Goethe-Universität\nempfangen und mit ausreichend Kaffee und Mate für den ganzen Tag versorgt. Bereits in der Opening Session zeichnete sich\ndie hohe Qualität der Veranstaltung ab als ",[585,19151,19154],{"href":19152,"rel":19153},"http://sdq.ipd.kit.edu/people/ralf_reussner/",[589],"Professor Ralf Reussner","\nvom ",[585,19157,18941],{"href":18939,"rel":19158},[589]," einen\neindrucksvollen ",[585,19161,19164],{"href":19162,"rel":19163},"https://entwicklertag.de/frankfurt/2015/opening-session-was-brauchen-softwaretechniker-um-ingenieure-zu-werden",[589],"Einblick in die Informatik-Forschung","\nbot. Mit den von ihm betreuten Forschungsprojekten ",[585,19167,19170],{"href":19168,"rel":19169},"http://www.palladio-simulator.com/",[589],"Palladio","\nund ",[585,19173,19176],{"href":19174,"rel":19175},"https://sdqweb.ipd.kit.edu/wiki/Vitruvius",[589],"Vitruvius (WIP)"," stellte er mächtige Werkzeuge vor, mit denen man schon\nvor der Implementierung eines Projekts die Auswirkungen von Designentscheidungen abschätzen kann. Nach diesem gelungenen\nAuftakt begann die eigentliche Konferenz mit in drei Tracks strukturierten Talks.",[649,19179,19181],{"id":19180},"leadership-hacks","Leadership Hacks",[18,19183,19184,19185,19190,19191,19195],{},"Eine der originellsten und interessantesten Sessions war ",[585,19186,19189],{"href":19187,"rel":19188},"https://twitter.com/benjamin",[589],"Benjamin Reitzammers"," Talk\nüber ",[585,19192,19181],{"href":19193,"rel":19194},"https://entwicklertag.de/frankfurt/2015/5-leadership-hacks-oder-wie-ich-meine-ideen-umgesetzt-bekomme",[589],".\nStatt der erwarteten Tricks zur Mitarbeitermanipulation lieferte er eine Anleitung, wie man durch sensibles und\nrücksichtsvolles Teamplayerverhalten seine Kollegen auf die eigene Seite bringt. Dabei spielen so einfache Dinge wie\nMittagessen und Zuhören schon eine große Rolle. Er appellierte in diesem Zusammenhang an Eigenschaften wie Sensibilität,\nVerlässlichkeit, Selbstreflexion und Kommunikation. 
Seine wichtigste Message: Don’t be a Rockstar!",[18,19197,19198],{},[2223,19199],{"alt":19200,"src":19201},"\"rockstar\"","https://media.synyx.de/uploads//2015/02/rockstar.png",[649,19203,19205],{"id":19204},"pecha-kucha","Pecha Kucha",[18,19207,19208,19209,19213],{},"Eine für uns neue Vortragstechnik, welche wir auf den Frankfurter Entwicklertagen kennenlernen durften, war\ndas ",[585,19210,19205],{"href":19211,"rel":19212},"http://de.wikipedia.org/wiki/Pecha_Kucha",[589],". Bei dieser Form des Vortrages wird die Anzahl der Folien\nauf 20 sowie die Projektionszeit je Folie auf 20 Sekunden begrenzt. Das Format versprach viel Informationen in kurzer\nZeit und so war es auch. Zudem gab es viel zu lachen. Ein durchaus gelungenes Format mit tollen Vorträgen.",[18,19215,19216,19217,19222],{},"Dr. Michael Eichberg gelang es in seinem\nVortrag ",[585,19218,19221],{"href":19219,"rel":19220},"http://www.entwicklertag.de/frankfurt/2015/your-jdk-devil-dark",[589],"Your JDK – The Devil is in the Dark"," mit kurzen\nund durchaus amüsanten Codeausschnitten des OpenJDK 8 einen Einblick in die kleinen Abgründe der Programmierung zu\nvermitteln. Unser Vorschlag für den Leadership Hack #6:Your JDK – The Devil is in the Dark mit einem Kollegen\nanschauen.",[18,19224,19225,19226,19231,19232,19237],{},"Im Votrag ",[585,19227,19230],{"href":19228,"rel":19229},"http://www.entwicklertag.de/frankfurt/2015/its-all-about-fun",[589],"It’s All About The Fun"," von Jens Schauder gab\nes einen kurzen Abriss über die Wichtigkeit des Spaßes in einem Unternehmen und dass dieser nie zu kurz kommen darf.\nAuch durchaus ungeliebte Aufgaben können mit der ",[585,19233,19236],{"href":19234,"rel":19235},"http://de.wikipedia.org/wiki/Pomodoro-Technik",[589],"Pomodoro-Technik"," und\neinem ‘Bonbon’ nach dieser Tätigkeit zu einem positiven Effekt führen. Ein schöner Vortrag dem wir uns nur anschließen\nkönnen. Spaß bei der Arbeit zeigt uns, warum wir uns dazu entschlossen haben, unser Hobby zum Beruf zu machen.",[18,19239,19240,19241,19246],{},"Jeder hat ihn in der Firma und kennt ihn nur zu gut.\nDen ",[585,19242,19245],{"href":19243,"rel":19244},"http://www.entwicklertag.de/frankfurt/2015/der-prozess-sheriff-eine-unliebsame-spezies",[589],"Prozess Sheriff",". Er\nbeharrt darauf den Prozess strikt und ohne Kompromisse zu folgen. Ob dieser in einem agilen Umfeld mit Scrum noch eine\nBerechtigung hat stellte uns Steffen Brandt vor. Ein netter Talk um sich an seine Kollegen zu erinnern und ein wenig zu\nschmunzeln.",[649,19248,19250],{"id":19249},"antifragile-software-für-die-welt-des-21-jahrhunderts","Antifragile Software für die Welt des 21. Jahrhunderts",[18,19252,19253,19254,19258],{},"Der Nachmittag startete mit einem sehr spannenden Vortrag von Johannes\nLink, ",[585,19255,19250],{"href":19256,"rel":19257},"https://entwicklertag.de/frankfurt/2015/keynote-antifragile-software-f%C3%BCr-die-welt-des-21-jahrhunderts",[589],",\nüber die Antifragilität von Systemen und was dies bedeutet. Fragil ist wahrscheinlich jedem von uns ein Begriff. Es\nbeschreibt die Eigenschaft eines Systems, welches durch starke Belastungen von außen beschädigt oder sogar zerstört\nwerden könnte. Entscheidend dabei ist, dass dieses Verhalten deterministisch anhand der Stärke der Belastung wiederholt\nhervorgerufen werden kann.",[18,19260,19261],{},[2223,19262],{"alt":19263,"src":19264},"\"fragil\"","https://media.synyx.de/uploads//2015/02/fragil.png",[18,19266,19267],{},"Natürliche Systeme wie zum Beispiel die Menschheit sind hingegen antifragil. 
Sie werden durch Belastungen ihrer Umwelt\nzunächst stärker, bis die Belastung zu extrem wird und eine negative Wirkung eintritt.",[18,19269,19270],{},"Wenn man dieses Konzept auf Softwaresysteme anwenden könnte, würde dies bedeuten, dass ein System sich selbst\nstabilisiert und anhand von, zum Beispiel, unüblichen Anfragen lernen würde. Das System würde robuster werden. Dieser\nAnsatz ist neu und sehr spannend und wirft die Frage auf, ob man ein System erschaffen kann, welches z.B. anhand von\nzyklisch wiederkehrenden selbst initiierten DoS Attacken antifragiler werden würde, sozusagen eine DoS Impfung erhält?",[18,19272,19273],{},[2223,19274],{"alt":19275,"src":19276},"\"hormesis\"","https://media.synyx.de/uploads//2015/02/hormesis2.png",[18,19278,19279,19280,19285],{},"In der Natur wird dieses Phänomen ",[585,19281,19284],{"href":19282,"rel":19283},"http://de.wikipedia.org/wiki/Hormesis",[589],"Hormesis"," genannt. Bei der ein Organismus,\nbzw. in unserem Fall ein System, eine positive Wirkung bei Belastung erfährt. Erst bei einer Überdosis ist die\nFunktionalität nicht mehr gewährleistet und das System kann seinen Dienst nicht mehr erfüllen.",[18,19287,19288],{},"Zusammengefasst ein sehr guter Vortrag und ein spannendes Themengebiet. Mehr davon!",[649,19290,19292],{"id":19291},"weitere-vorträge","Weitere Vorträge",[18,19294,19295,19296,19299,19300,19305,19306,19171,19311,19316,19317,19322,19323,19328,19329,19334],{},"Auch von den restlichen Vorträgen waren einige erwähnenswert. Hagen Buchwald vom\nVeranstalter ",[585,19297,19148],{"href":19146,"rel":19298},[589]," erläuterte auf hohem Niveau und mit vielen Erfahrungen im\nGepäck inwiefern der Pfad eines Unternehmens zur Agilität von\nder ",[585,19301,19304],{"href":19302,"rel":19303},"https://entwicklertag.de/frankfurt/2015/agile-transition-core-culture-matters",[589],"Unternehmenskultur"," abhängt. Den\ngoldenen Pixel für den kreativsten Vortrag bekommen ",[585,19307,19310],{"href":19308,"rel":19309},"https://twitter.com/jensbroos",[589],"Jens Broos",[585,19312,19315],{"href":19313,"rel":19314},"https://twitter.com/mrupilo",[589],"Martin Ruprecht"," verliehen, die durch eine beeindruckende Anzahl von Analogien\nbelegten, was\ndie ",[585,19318,19321],{"href":19319,"rel":19320},"https://entwicklertag.de/frankfurt/2015/was-wir-software-entwickler-vom-modernen-fussball-lernen-k%C3%B6nnen",[589],"Softwareentwicklung mit Fußball","\ngemein hat und was wir uns bei unserer täglichen Arbeit von den Kickern abschauen sollten. Einer der wenigen Vorträge\nmit Code auf den Folien war der Talk von ",[585,19324,19327],{"href":19325,"rel":19326},"https://twitter.com/dasniko",[589],"Niko Köbler",", der verschiedene Wege und\nToolchains\naufzeigte, ",[585,19330,19333],{"href":19331,"rel":19332},"https://entwicklertag.de/frankfurt/2015/nodejs-auf-der-jvm-nodyn-und-avatarjs-im-vergleich",[589],"Node.js auf der JVM","\nzu nutzen, wobei anscheinend Nashörner aller Art eine größere Rolle spielen.",[18,19336,19337],{},[2223,19338],{"alt":19339,"src":19340},"\"aufstellung\"","https://media.synyx.de/uploads//2015/02/aufstellung.png",[649,19342,969],{"id":968},[18,19344,19345],{},"Wieder zurück auf unserem heimischen Bürostuhl können wir sagen, dass der Entwicklertag Frankfurt eine nette kleine\nKonferenz ist, die man sich anschauen sollte, wenn man in der Nähe ist.",[18,19347,19348,19349,19353],{},"Die Qualität der Talks war hoch, die Atmosphäre angenehm und das Catering ließ kaum Wünsche offen. 
Die Folien\nsind ",[585,19350,16075],{"href":19351,"rel":19352},"http://www.entwicklertag.de/frankfurt/2015/programm",[589]," öffentlich zugänglich. Das Einzige Manko dieses Jahr:\nEs wurden für unseren Geschmack zu wenige Informationen vermittelt, die unsere konkrete tägliche Arbeit als Entwickler\nverbessern können. Zu einem großen Anteil wurden die Themen in Form abstrakter Konzepte auf hohem Level behandelt.\nObwohl wir das generell begrüßen und die Themen dank der guten Speaker interessant präsentiert wurden, haben wir mehr\nkonkrete Beispiele und Vermittlung von Best Practices vermisst. Das Fazit der Konferenz ist daher: Neat stuff, but too\nmeta. Relevant XKCD:",[18,19355,19356],{},[585,19357,19360],{"href":19358,"rel":19359},"http://xkcd.com/1447/",[589],[2223,19361],{"alt":48,"src":19362},"http://imgs.xkcd.com/comics/meta-analysis.png",[18,19364,19365],{},[573,19366,19367],{},"Quelle: xkcd.com",{"title":48,"searchDepth":86,"depth":86,"links":19369},[19370,19371,19372,19373,19374],{"id":19180,"depth":126,"text":19181},{"id":19204,"depth":126,"text":19205},{"id":19249,"depth":126,"text":19250},{"id":19291,"depth":126,"text":19292},{"id":968,"depth":126,"text":969},[613],"2015-02-26T17:41:10","Während der Karlsruher Entwicklertag der andrena objects ag\\nschon seit 2010 ein etabliertes Event in Karlsruhe ist, brachte der Veranstalter die Konferenz dieses Jahr erst zum\\nzweiten Mal nach Frankfurt. Fünf synyx-Kollegen dachten sich: “Die nehmen wir doch mit!” und bestiegen den ICE in\\nRichtung Deutschlands Bankenmetropole. Die Entwicklergemeinde wurde dort in einem Gebäude der Goethe-Universität\\nempfangen und mit ausreichend Kaffee und Mate für den ganzen Tag versorgt. Bereits in der Opening Session zeichnete sich\\ndie hohe Qualität der Veranstaltung ab als Professor Ralf Reussner\\nvom KIT einen\\neindrucksvollen Einblick in die Informatik-Forschung\\nbot. Mit den von ihm betreuten Forschungsprojekten Palladio\\nund Vitruvius (WIP) stellte er mächtige Werkzeuge vor, mit denen man schon\\nvor der Implementierung eines Projekts die Auswirkungen von Designentscheidungen abschätzen kann. Nach diesem gelungenen\\nAuftakt begann die eigentliche Konferenz mit in drei Tracks strukturierten Talks.","https://synyx.de/blog/entwicklertag-frankfurt-2015/",{},"/blog/entwicklertag-frankfurt-2015",{"title":19128,"description":19382},"Während der Karlsruher Entwicklertag der andrena objects ag\nschon seit 2010 ein etabliertes Event in Karlsruhe ist, brachte der Veranstalter die Konferenz dieses Jahr erst zum\nzweiten Mal nach Frankfurt. Fünf synyx-Kollegen dachten sich: “Die nehmen wir doch mit!” und bestiegen den ICE in\nRichtung Deutschlands Bankenmetropole. Die Entwicklergemeinde wurde dort in einem Gebäude der Goethe-Universität\nempfangen und mit ausreichend Kaffee und Mate für den ganzen Tag versorgt. Bereits in der Opening Session zeichnete sich\ndie hohe Qualität der Veranstaltung ab als Professor Ralf Reussner\nvom KIT einen\neindrucksvollen Einblick in die Informatik-Forschung\nbot. Mit den von ihm betreuten Forschungsprojekten Palladio\nund Vitruvius (WIP) stellte er mächtige Werkzeuge vor, mit denen man schon\nvor der Implementierung eines Projekts die Auswirkungen von Designentscheidungen abschätzen kann. 
Nach diesem gelungenen\nAuftakt begann die eigentliche Konferenz mit in drei Tracks strukturierten Talks.","blog/entwicklertag-frankfurt-2015",[7906,19385,7721,19386,5846,14039,6884],"ausbildung","etffm","Während der Karlsruher Entwicklertag der andrena objects ag schon seit 2010 ein etabliertes Event in Karlsruhe ist, brachte der Veranstalter die Konferenz dieses Jahr erst zum zweiten Mal nach Frankfurt.…","Btw4IgxkifwFP3VIqPjGqsCL8uf60AJ4xKsGG3mj-_E",{"id":19390,"title":19391,"author":19392,"body":19394,"category":19611,"date":19612,"description":19401,"extension":617,"link":19613,"meta":19614,"navigation":499,"path":19615,"seo":19616,"slug":19617,"stem":19618,"tags":19619,"teaser":19620,"__hash__":19621},"blog/blog/rancid-on-ubuntu-14-10.md","RANCID on Ubuntu 14.10",[19393],"kesler",{"type":11,"value":19395,"toc":19609},[19396,19399,19402,19405,19413,19418,19421,19430,19433,19446,19451,19454,19464,19467,19487,19490,19497,19500,19519,19524,19527,19549,19554,19557,19564,19567,19574,19577,19580,19585,19588,19602],[14,19397,19391],{"id":19398},"rancid-on-ubuntu-1410",[18,19400,19401],{},"Just a quick one today…",[18,19403,19404],{},"RANCID (Really Awesome New Cisco config Differ) is a software to monitor a routers software and hardware configuration,\nand to maintain history of configuration changes by using CVS.",[18,19406,19407,19408,986],{},"If you need more information about Rancid, ",[585,19409,19412],{"href":19410,"rel":19411},"http://www.shrubbery.net/rancid/",[589],"you can take a look at their website",[18,19414,19415],{},[27,19416,19417],{},"Installing RANCID",[18,19419,19420],{},"Installing Rancid is easy:",[11259,19422,19423],{},[18,19424,19425,19426,19429],{},"root@",[53,19427,19428],{},"server",":/# apt-get install rancid",[18,19431,19432],{},"After the installation, we can check for a new group and user on the system:",[11259,19434,19435,19440,19443],{},[18,19436,19425,19437,19439],{},[53,19438,19428],{},":/# cat /etc/group /etc/passwd | grep rancid",[18,19441,19442],{},"rancid❌133:",[18,19444,19445],{},"rancid❌122:133::/var/lib/rancid:/bin/bash",[18,19447,19448],{},[27,19449,19450],{},"Configuring RANCID",[18,19452,19453],{},"First, let’s create groups to organize our devices, such as “switches” and “router” groups, and/or you might want to\ngroup them by their location.",[11259,19455,19456,19461],{},[18,19457,19425,19458,19460],{},[53,19459,19428],{},":/# vi /etc/rancid/rancid.conf",[18,19462,19463],{},"LIST_OF_GROUPS=”switches”",[18,19465,19466],{},"To receive Email-Notifications about configuration changes, we need to add email aliases in our /etc/aliases file.",[11259,19468,19469,19474,19477,19480],{},[18,19470,19425,19471,19473],{},[53,19472,19428],{},":/# vi /etc/aliases",[18,19475,19476],{},"rancid-admin-switches: rancid-switches",[18,19478,19479],{},"rancid-switches: admins",[18,19481,19482,19483],{},"admins: ",[585,19484,19486],{"href":19485},"mailto:admin@your-domainname.xyz","admin@your-domainname.xyz",[18,19488,19489],{},"Now we need to run rancid-cvs, to create the CVS folder structure that our device configurations will be stored in.\nMake sure to run this command as the RANCID user.",[11259,19491,19492],{},[18,19493,19425,19494,19496],{},[53,19495,19428],{},":/# sudo su -c /var/lib/rancid/bin/rancid-cvs -s /bin/bash -l rancid",[18,19498,19499],{},"You should now see a bunch of new directories in /var/lib/rancid, named after the groups you defined earlier (in our\nexample, this would be /var/lib/rancid/switches. 
Inside each of these directories, there will be a file named router.db.\nThis is where we tell RANCID what devices exist for each group. The format of the device definition is “hostname:type:\nstatus”, where “hostname” is the FQDN or IP, “type” is the type of the device and “status” is up or down.",[11259,19501,19502,19507,19510,19513,19516],{},[18,19503,19425,19504,19506],{},[53,19505,19428],{},":/# vi /var/lib/rancid/switches/router.db",[18,19508,19509],{},"switch1.your-domainname.xyz:hp:up",[18,19511,19512],{},"switch2.your-domainname.xyz:hp:up",[18,19514,19515],{},"1.2.3.4:hp:up",[18,19517,19518],{},"1.2.3.5:hp:down",[18,19520,19521],{},[27,19522,19523],{},"Login & Authentication",[18,19525,19526],{},"Next, we edit /var/lib/rancid/.cloginrc to tell RANCID how to access the devices. Depending on your devices, this might\nor might not be fairly easy or quite complicated. It’s best to man cloginrc to see all available options that you can\nuse. In our example, we use a simple HP-Router setup:",[11259,19528,19529,19534,19537,19540,19543,19546],{},[18,19530,19425,19531,19533],{},[53,19532,19428],{},":/# vi /var/lib/rancid/switches/.cloginrc",[18,19535,19536],{},"add method switch1.your-domainname.xyz {ssh}",[18,19538,19539],{},"add cyphertype switch1.your-domainname.xyz {3des}",[18,19541,19542],{},"add user switch1.your-domainname.xyz {username}",[18,19544,19545],{},"add password switch1.your-domainname.xyz {password} {enable_password}",[18,19547,19548],{},"add autoenable switch1.your-domainname.xyz",[18,19550,19551],{},[27,19552,19553],{},"Testing our setup",[18,19555,19556],{},"We can test our setup by using clogin with a configuration and device specified:",[11259,19558,19559],{},[18,19560,19425,19561,19563],{},[53,19562,19428],{},":/# /usr/lib/rancid/bin/clogin -f /var/lib/rancid/.cloginrc switch1.your-domainname.xyz",[18,19565,19566],{},"If you have done everything right, you will end up in enable mode on the specified device. It’s time to test the real\nthing now: Let’s go ahead and manually invoke a rancid-run.",[11259,19568,19569],{},[18,19570,19425,19571,19573],{},[53,19572,19428],{},":/# sudo su -c /var/lib/rancid/bin/rancid-run -s /bin/bash -l rancid",[18,19575,19576],{},"This command may take a while to run, depending on how many devices you have configured. After it finished, you should\nreceive Emails from RANCID sent to the addresses that you specified earlier. You can now also review the logfiles in\n/var/log/rancid to see if there are any problems, and check for the downloaded configuration files in\n/var/lib/rancid//configs.",[18,19578,19579],{},"You might end up running into a problem though, where logging in with clogin works fine, but when trying to actually run\na rancid-run, it will leave you with a “timeout” or “password incorrect” error. I found out this can cause quite a\nheadache and is caused by rancid-run depending on certain options in the .cloginrc, that are not necessary when testing\nthe .cloginrc options with clogin itself. Again, i can only point out to man cloginrc for further information and\ntroubleshooting.",[18,19581,19582],{},[27,19583,19584],{},"Finalizing RANCID",[18,19586,19587],{},"Finally, we want to automate RANCID, by creating a cronjob that calls “rancid-run” on a regular basis. Depending on\nyour needs, you can run it every 15 Minutes, once a Week or any time period in between. 
In this example, we trigger a\nrancid-run every half hour:",[11259,19589,19590,19595,19599],{},[18,19591,19425,19592,19594],{},[53,19593,19428],{},":/# sudo su -c “/usr/bin/crontab -e -u rancid”",[14,19596,19598],{"id":19597},"m-h-dom-mon-dow-command","m h dom mon dow command",[18,19600,19601],{},"/30 * * * * /usr/bin/rancid-run",[18,19603,19604,19605,986],{},"If you end up running into any problems or just want more information, you can also check the RANCID mailing\nlist: ",[585,19606,19607],{"href":19607,"rel":19608},"http://www.shrubbery.net/pipermail/rancid-discuss/",[589],{"title":48,"searchDepth":86,"depth":86,"links":19610},[],[6869,613],"2015-02-06T16:01:42","https://synyx.de/blog/rancid-on-ubuntu-14-10/",{},"/blog/rancid-on-ubuntu-14-10",{"title":19391,"description":19401},"rancid-on-ubuntu-14-10","blog/rancid-on-ubuntu-14-10",[],"Just a quick one today… RANCID (Really Awesome New Cisco config Differ) is a software to monitor a routers software and hardware configuration, and to maintain history of configuration changes…","36uGhK01I3KkQWwND_79GpxNtdFPy0B-EdZw5blrWlM",{"id":19623,"title":19624,"author":19625,"body":19627,"category":19877,"date":19878,"description":19879,"extension":617,"link":19880,"meta":19881,"navigation":499,"path":19882,"seo":19883,"slug":19631,"stem":19885,"tags":19886,"teaser":19888,"__hash__":19889},"blog/blog/javascript-linting-tool-evaluation.md","Javascript Linting Tool Evaluation",[19626,13652],"mueller",{"type":11,"value":19628,"toc":19868},[19629,19632,19646,19671,19680,19683,19687,19755,19758,19761,19774,19789,19792,19809,19812,19815,19826,19830,19833,19836,19839,19850,19854,19857,19863,19865],[14,19630,19624],{"id":19631},"javascript-linting-tool-evaluation",[18,19633,19634,19635,19640,19641,19645],{},"In our internal JavaScript ‘User Group’ (called JS-Posse in honour of the\nlegendary ‘",[585,19636,19639],{"href":19637,"rel":19638},"http://www.javaposse.com",[589],"The Java Posse","‘ by Dick Wall, Chet Haase et al.), we recently decided to evaluate\nalternatives to our current JavaScript linting standart, JSHint. Although well established by now among different\ndevelopment teams across ",[585,19642,6885],{"href":19643,"rel":19644},"http://www.synyx.de",[589],", using it never felt 100% comfortable. 
A quick Google search left\nus with three alternatives:",[577,19647,19648,19656,19663],{},[580,19649,19650,19655],{},[585,19651,19654],{"href":19652,"rel":19653},"http://jslint.com",[589],"JSLint"," by Doug Crockford himself",[580,19657,19658],{},[585,19659,19662],{"href":19660,"rel":19661},"https://developers.google.com/closure/utilities/",[589],"Closure Linter by Google",[580,19664,19665,19670],{},[585,19666,19669],{"href":19667,"rel":19668},"http://eslint.org",[589],"ESLint",", the new kid on the block",[18,19672,19673,19674,19679],{},"…as well as ",[585,19675,19678],{"href":19676,"rel":19677},"http://jshint.com/",[589],"JSHint"," itself, of course.",[18,19681,19682],{},"We drew up a quick spreadsheet for evaluating the tools and came up with the following.",[2207,19684,19686],{"id":19685},"criteria","Criteria",[577,19688,19689,19695,19701,19707,19713,19719,19725,19731,19737,19743,19749],{},[580,19690,19691,19694],{},[27,19692,19693],{},"Performance"," How long does it take to run over our example project, a single page webapp with a couple of thousands\nof JavaScript LOC?",[580,19696,19697,19700],{},[27,19698,19699],{},"Licensing"," Does the license meet our requirements (and those of our customers, of course)?",[580,19702,19703,19706],{},[27,19704,19705],{},"Project health/adoption"," How healthy is the project? Is it on Github, and is it well maintained?",[580,19708,19709,19712],{},[27,19710,19711],{},"Completeness of configurations"," Does the tool cover all our use-cases for a linting tool?",[580,19714,19715,19718],{},[27,19716,19717],{},"Productivity (rule set creation / project setup)"," When creating a new project, is it difficult to create a matching\nruleset? Does the tool come with a reasonable default rule set, or do you need to set up all the checks yourself?",[580,19720,19721,19724],{},[27,19722,19723],{},"Productivity (active software development)"," During active development, does the tool assist the developer in\nwriting quality code, or does it bully you to the point where you’d rather abolish using a linting tool at all?",[580,19726,19727,19730],{},[27,19728,19729],{},"Quality of Documentation/Tutorials/Self Help"," How good is the project documentation? When the tool breaks the build\nwith a certain error message, how difficult is it to find reliable information on the error in question (why does it\noccur, why is it a bad practice, how to fix it)?",[580,19732,19733,19736],{},[27,19734,19735],{},"Ability to integrate with existing projects"," Is it possible to integrate the linting tool in an existing projects\nwithout making changes to the project to comply to the rules?",[580,19738,19739,19742],{},[27,19740,19741],{},"Integration with build tool"," Is it possible to integrate the linting tool into your build chain to receive direct\nfeedback?",[580,19744,19745,19748],{},[27,19746,19747],{},"ES6 support"," How well does the project support future versions of the language?",[580,19750,19751,19754],{},[27,19752,19753],{},"Pluggable"," Is it possible to extend the given rule set with custom checks?",[2207,19756,19678],{"id":19757},"jshint",[18,19759,19760],{},"The first tool we looked at was the already-familiar JSHint. We already knew what was bothering us about it:",[577,19762,19763,19771],{},[580,19764,19765,19766,19770],{},"Its hard to find the documentation for a certain error message. While the error messages itself are mostly\nself-explanatory, it can be somewhat difficult to find out how to deactivate or customize a certain rule. 
For\nexample, ",[585,19767,19768],{"href":19768,"rel":19769},"http://jshint.com/docs/options/",[589]," JSHint has both ‘enforcing’ and ‘relaxing’ rules. While setting a ‘enforcing’\nrule to true turns it on, setting a ‘relaxing’ rule to true deactivates it.",[580,19772,19773],{},"More often than not, using JSHint can be frustrating. For example, we had ‘maxdepth’",[18,19775,19776,19777,19780,19781,19784,19785,19788],{},"set to 3, meaning a maximum of three nested blocks of code was allowed. In case one of those blocks was a ‘",[573,19778,19779],{},"for … in","‘\nstatement, JSHint would (correctly) complain that its body should be wrapped in an ‘",[573,19782,19783],{},"if(obj.hasOwnProperty(key))","\n‘-check to filter out unwanted properties. However, doing so meant introducing another nested block, and if that pushed\nthe total depth beyond ‘",[573,19786,19787],{},"maxdepth","‘, JSHint would fail the build. The solution was usually to introduce private helper\nfunctions, which can make otherwise trivial code difficult to read (since you have to skip blocks of code).",[18,19790,19791],{},"Of course, that is not really the fault of JSHint (seeing that it only did what it was told to do), but it was a rather\nbig annoyance that caused us to re-evaluate our JavaScript linting practices in the first place.",[577,19793,19794],{},[580,19795,19796,19797,19802,19803,19808],{},"Being a fork of JSLint, JSHint has the same license containing the\ninfamous ",[585,19798,19801],{"href":19799,"rel":19800,"title":19801},"http://en.wikipedia.org/wiki/JSLint#License",[589],"JSLint License"," ‘Good, not evil’ statement.\nWhile we understand its humorous intent (and being\na ",[585,19804,19807],{"href":19805,"rel":19806,"title":19807},"https://synyx.de/unternehmen/verantwortung_csr/",[589],"fairly social responsible company",",\nwe wholeheartedly support it), we were worried that some corporate lawyer might not approve of our use of a tool bound\nto such a license.",[2207,19810,19654],{"id":19811},"jslint",[18,19813,19814],{},"After JSHint we decided to evaluate the old guy in the gang, JSLint. It was the first linting tool for JavaScript, and\nit feels like that. From our opinion JSLint has two major problems, besides the license (see JSHint):",[577,19816,19817,19820,19823],{},[580,19818,19819],{},"The website of JSLint is very old-school and does not contain any explanations of the ruleset or any information to\nget a link between the error messages provided by JSLint and the problem in the code. So you have to search through\nthe internet to find any third-party explanation that will help you to fix the problem.",[580,19821,19822],{},"Some of the rules of JSLint are, at least, strangely named. There is a rule to forbid (or allow, if deactivated)\n‘stupidity’. The project’s web page does not provide any explanation (again) – resorting to Google, we found out that\n‘stupidity’ referred to the usage of synchronous functions in Node.js’s file system module.",[580,19824,19825],{},"The rule set is very strict and, when you look through the GitHub issues and pull requests, it is very hard to\nparticipate in the JSLint project. That’s why the community is very small and there are a lot more people active in\nJSHint and other projects and bring in their ideas there. Maybe that is why JSLint does not provide a rule similar to\n‘latedef’ from JSHint or ‘no-use-before-define’ from ESLint. 
Without this rule it is very hard to structure your\ncode with private named-functions at the end without assigning the function to a variable at the start (and that\nwould not be what we want).",[2207,19827,19829],{"id":19828},"closure-linter","Closure Linter",[18,19831,19832],{},"The Closure Linter is part of Google’s Closure tool set. It was designed for internal use and provides very little\noptions for customization. Since following the rules enforced by it seems to be mandatory within Google, that is\ncertainly an acceptable practice. However, since it is (for example) not possible to change the default maximum line\nlength of 80 characters, we quickly decided not to look into the tool any more.",[2207,19834,19669],{"id":19835},"eslint",[18,19837,19838],{},"When looking at ESLint, we were quick to decide that we might be looking at a potential winner:",[577,19840,19841,19844,19847],{},[580,19842,19843],{},"While the project is (by far) the youngest (or as others might put it: the least mature) of the four tools we looked\nat, it is also the best maintained. 100+ contributors on Github and a roughly monthly release schedule speak for\nthemselves.",[580,19845,19846],{},"Applying it to our example web app, we were surprised to find out that it was sufficient to write about ten lines of\nconfiguration to perform the same amount of checks that required around a hundred lines in JSHint. Of course, that\nmight only mean that we have the same idea of quality JavaScript code as the tool’s authors, but nevertheless it meant\nthat the tool would be quite easy to adopt into our development process.",[580,19848,19849],{},"The output is quite handy: It prints both a one-line human readable error message as well as an error code for a\nquick lookup in the documentation.",[2207,19851,19853],{"id":19852},"evaluation","Evaluation",[18,19855,19856],{},"The criteria from above has been weighted from five to 15, from not important to important, and the tools got a 0, 0.5\nor 1 if it does not, almost or absolute fulfill the criterion.",[18,19858,19859],{},[2223,19860],{"alt":19861,"src":19862},"\"jsLinting\"","https://media.synyx.de/uploads//2015/02/jsLinting2.png",[2207,19864,12384],{"id":12383},[18,19866,19867],{},"As you can see in the image above, ESLint proved to be the winner of our evaluation. We decided that its major flaw, the\npotential immaturity, was acceptable to us since by its nature, it would only be used during (internal) development. The\nease of use, both because of the robust and reasonable default rule set and the high-quality documentation, outweighed\nany concern by far. We are looking forward to adopt ESLint into our development tool chain over the coming weeks and\nmonths!",{"title":48,"searchDepth":86,"depth":86,"links":19869},[19870,19871,19872,19873,19874,19875,19876],{"id":19685,"depth":86,"text":19686},{"id":19757,"depth":86,"text":19678},{"id":19811,"depth":86,"text":19654},{"id":19828,"depth":86,"text":19829},{"id":19835,"depth":86,"text":19669},{"id":19852,"depth":86,"text":19853},{"id":12383,"depth":86,"text":12384},[613,996],"2015-02-03T09:45:38","In our internal JavaScript ‘User Group’ (called JS-Posse in honour of the\\nlegendary ‘The Java Posse‘ by Dick Wall, Chet Haase et al.), we recently decided to evaluate\\nalternatives to our current JavaScript linting standart, JSHint. Although well established by now among different\\ndevelopment teams across synyx, using it never felt 100% comfortable. 
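To make the JSHint annoyance described in the JSHint section above a bit more concrete, here is a small hypothetical sketch of the maxdepth/forin conflict. The function and property names are invented for illustration, and the assumed .jshintrc values ({ "maxdepth": 3, "forin": true }) are stand-ins, not our actual project configuration.

```javascript
// Hypothetical sketch of the maxdepth/forin conflict; names and the assumed
// .jshintrc options ({ "maxdepth": 3, "forin": true }) are illustrative only.
function markVisible(widgets) {
    if (widgets) {                                   // one level of nesting
        for (var key in widgets) {                   // the "for ... in" block
            // the guard that the "forin" option demands ...
            if (widgets.hasOwnProperty(key)) {
                // ... leaves no room for the actual work: this innermost block
                // pushes the nesting past "maxdepth": 3 and fails the build.
                if (widgets[key].hidden === false) {
                    widgets[key].visible = true;
                }
            }
        }
    }
}
```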
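And to illustrate why the ESLint setup felt so compact: below is a rough sketch that runs ESLint programmatically with a small inline rule set, assuming an ESLint release from that era that ships the CLIEngine Node API. The handful of rules shown is an illustrative selection, not the exact set we settled on.

```javascript
// Rough sketch only: the rule selection is illustrative, not our real config,
// and it assumes an ESLint version that exposes the CLIEngine Node API.
var CLIEngine = require("eslint").CLIEngine;

var cli = new CLIEngine({
    envs: ["browser"],
    rules: {
        "no-use-before-define": 2,   // the check we missed in JSLint
        "max-depth": [2, 3],
        "curly": 2,
        "eqeqeq": 2,
        "no-undef": 2
    }
});

var report = cli.executeOnFiles(["app/"]);

report.results.forEach(function (result) {
    result.messages.forEach(function (msg) {
        // one readable message plus the rule id for a quick documentation lookup
        console.log(result.filePath + ":" + msg.line +
            " [" + msg.ruleId + "] " + msg.message);
    });
});
```

In a real project the same handful of rules would simply live in a .eslintrc file next to the code; the point here is only how little configuration is needed to cover the checks we actually care about.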
A quick Google search left\\nus with three alternatives:","https://synyx.de/blog/javascript-linting-tool-evaluation/",{},"/blog/javascript-linting-tool-evaluation",{"title":19624,"description":19884},"In our internal JavaScript ‘User Group’ (called JS-Posse in honour of the\nlegendary ‘The Java Posse‘ by Dick Wall, Chet Haase et al.), we recently decided to evaluate\nalternatives to our current JavaScript linting standart, JSHint. Although well established by now among different\ndevelopment teams across synyx, using it never felt 100% comfortable. A quick Google search left\nus with three alternatives:","blog/javascript-linting-tool-evaluation",[7611,19835,19852,7265,14756,19757,19811,19887,6884],"linting","In our internal JavaScript ‘User Group’ (called JS-Posse in honour of the legendary ‘The Java Posse‘ by Dick Wall, Chet Haase et al.), we recently decided to evaluate alternatives to…","QcXdToJb-JTf6znqD4o0GbdwGfu2XcNA6y-tOvH75ck",{"id":19891,"title":19892,"author":19893,"body":19895,"category":20159,"date":20160,"description":20161,"extension":617,"link":20162,"meta":20163,"navigation":499,"path":20164,"seo":20165,"slug":19899,"stem":20167,"tags":20168,"teaser":20174,"__hash__":20175},"blog/blog/time-series-data-is-the-the-new-big-data.md","Time Series Data is the the new Big Data",[19894,19393],"mennerich",{"type":11,"value":19896,"toc":20146},[19897,19900,19919,19940,19944,19953,19957,19975,19984,19988,20006,20010,20020,20034,20037,20041,20044,20052,20055,20059,20068,20072,20075,20079,20088,20092,20095,20129,20132],[14,19898,19892],{"id":19899},"time-series-data-is-the-the-new-big-data",[18,19901,19902,19903,19908,19909,19914,19915],{},"On 22 November 2014, the ",[585,19904,19907],{"href":19905,"rel":19906},"https://2014.nosql-matters.org/bcn/homepage/",[589],"NoSQL matters conference"," took place in\nBarcelona at the ",[585,19910,19913],{"href":19911,"rel":19912},"http://www.uab-casaconvalescencia.org/en/index.php",[589],"Casa Convalescència",", which is doubtless one of\nthe most beautiful locations for a conference! The Casa was declared a Historical Artistic Monument in 1978, and a World\nCultural Heritage Site in 1997, and these great halls are a great place for great\nspeakers.",[2223,19916],{"alt":19917,"src":19918},"\"20141122_090344\"","https://media.synyx.de/uploads//2014/11/20141122_090344.jpg",[18,19920,19921,19922,19927,19928,19933,19934,19939],{},"This year, Ellen Friedman (",[585,19923,19926],{"href":19924,"rel":19925},"https://twitter.com/ellen_friedman",[589],"@Ellen_Friedman",") and Ted\nDunning (",[585,19929,19932],{"href":19930,"rel":19931},"https://twitter.com/ted_dunning",[589],"@ted_dunning",") were among them, and it is always a pleasure to listen the\nboth of them pointing out actual society-changing trends in modern big data and NoSQL technologies. Ellen Friedman held\nthe keynote, and their common topic was the necessity of processing time series data. Ellen Friedman has experience in a\nwide range of scientific fields. She is a committer for the Apache Mahout project, and contributes to Apache Drill. Ted\nDunning is also involved in various Apache projects, as a committer and PMC member of Mahout, ZooKeeper and Drill, and\nis also mentor for Apache Storm, DataFu, Flink and Optiq. 
Both are working for ",[585,19935,19938],{"href":19936,"rel":19937},"http://mapr.com",[589],"MapR"," at the moment.\nMapR also held a training at the training day of NoSQL matters Barcelona.",[2207,19941,19943],{"id":19942},"there-is-time-series-data-everywhere","There is (time series) data everywhere",[18,19945,19946,19947,19952],{},"Time series data processing and real time data analysis are a big issue nowadays, and topic of many of the last years\nNoSQL conference talks. The world gets more and more distributed, there are sensors everywhere, reporting thousands of\nmeasurement each second. The so called ",[585,19948,19951],{"href":19949,"rel":19950},"http://en.wikipedia.org/wiki/Internet_of_Things",[589],"Internet of Things"," (IoT)\nproduces an enormous amount of data every day: From smart meters in plants to smart shirts for athletes, almost every\nobject in our everyday life has the ability to emit data. But how to store and query the data efficiently? And first of\nall: Why do we need all the data and what to with it?",[649,19954,19956],{"id":19955},"the-history-of-time-series-data","The history of Time Series Data",[18,19958,19959,19960,99,19963,11792,19965,19968,19969,19974],{},"Time series are an old idea, the city of Barcelona stores data about the citizens extensively since the 13th Century. An\nimpressive example of a ",[27,19961,19962],{},"crowdsourced",[27,19964,12515],{},[27,19966,19967],{},"big data"," analysis project dates back about 170\nyears: ",[585,19970,19973],{"href":19971,"rel":19972},"http://en.wikipedia.org/wiki/Matthew_Fontaine_Maury",[589],"Matthew Fountaine Maury",", a mariner with the United States\nNavy in the mid-19th century, who was forced to desk-work after a leg injury left him unfit for sea duty, devoted his\ntime to the study of navigation, meteorology, winds, and currents. As officer-in-charge of the Navy office in\nWashington, DC, Maury became a librarian of the many unorganized log books and records in 1842. There, he sought to\nimprove seamanship through sorting the available, yet unorganized information in his office, analyzing roughly one\nbillion data points by hand!",[18,19976,19977,19978,19983],{},"His thorough analysis resulted in his ",[585,19979,19982],{"href":19980,"rel":19981},"http://icoads.noaa.gov/maury.pdf",[589],"wind and weather charts",". Maury made them\nwidely available by sharing them among other Captains, on the condition that they report and share their own logs back\nto his office, therefore providing a constant data base to continuously improve his charts. In 1848, Captain Jackson was\nable to shorten his journey from Baltimore to Rio de Janeiro by more than a month by exploiting Maurys charts. After\nthat Maurys charts spread among Captains, and in 1853 the data was the basis for the fastest voyage from New York to San\nFrancisco, made by the Flying Cloud under the female navigator Eleanor Creesy – a record that lasted for over a hundred\nyears.",[649,19985,19987],{"id":19986},"see-with-your-eyes-closed-think-with-your-eyes-open","See with your eyes closed – think with your eyes open",[18,19989,19990,19991,19994,19995,19998,19999,20002,20003,986],{},"This example does not only show how time series data analysis can be used for informed ",[27,19992,19993],{},"data-driven decisions",", it\nalso shows that thinking about your use-cases and data is important. What is it you want to achieve, and what does your\ndata tell you? 
Ellen Friedman proposed to close your eyes for a moment or two, and think about the data you gathered,\n",[573,19996,19997],{},"look"," at it the right way and let it tell you what is in it. With your eyes open again, try to find out more about it,\nsearch for trends and hidden secrets – it is basically like a crime story. Maurys vision of the charts was ",[573,20000,20001],{},"eyes closed\nseeing",", his keen observation and focus on the details ",[573,20004,20005],{},"eyes open thinking",[649,20007,20009],{"id":20008},"big-data-in-the-blink-of-an-eye","Big data in the blink of an eye",[18,20011,20012,20013,20015,20016,20019],{},"Todays sensors emit much more data than Maury had available to take into account for his charts. Thousands to millions\nof data points are collected by sensors, smart meters, RFIDs and many more every second of every day. In modern power\nplants almost every part, from pumps to valves, constantly sends data about its state, temperature, processed fluids and\nmany other information. All this data is valuable and offers enormous opportunities: Critical states that might not have\nbeen taken into account, could lead to failure and thanks to being able to detect unusual data values reported from the\nsensors, life threatening situations can be detected ",[27,20014,17993],{}," they occur. Correlations of events, deducible from the\nevents of the time series of different parts can help to understand situations leading to failures, and therefore reduce\nrisks in the future. ",[27,20017,20018],{},"Prediction"," of material fatigue and failure behavior could be achieved as well as *\n*classification** and **anomaly detection**. In general, in a wide range of events ranging from natural sciences to\nmonetary businesses to marketing to medical care",[577,20021,20022,20025,20028,20031],{},[580,20023,20024],{},"prognostication",[580,20026,20027],{},"introspection",[580,20029,20030],{},"prediction and",[580,20032,20033],{},"diagnosis",[18,20035,20036],{},"might get possible.",[2207,20038,20040],{"id":20039},"how-to-use-time-series-databases","How to use time series databases",[18,20042,20043],{},"Now that we understand why time series data is valuable, we are interested in how to process them? Friedman and Dunning\nnicely explained the necessity of dedicated time series database technologies. Usually, time series data is very simple:\nA (static) data source emits time/value pairs, and thats basically it. If you have",[577,20045,20046,20049],{},[580,20047,20048],{},"a huge amount of (time series) data, and",[580,20050,20051],{},"queries mostly based on time or time ranges",[18,20053,20054],{},"you might think about using a time series databases (TSDB) that enables you to efficiently analyze the data.",[649,20056,20058],{"id":20057},"why-not-use-a-relational-database-system","Why not use a relational database system?",[18,20060,20061,20062,20067],{},"Interestingly, data storage is less of a problem than efficient data retrieval. A traditional relational database\nsystem (RDBMS) does not suffice when it comes to efficient time series data retrieval. The overhead generated by unused\ntransaction management and query optimizers, together with the row-by-row retrieval forced by star schemas, does not\nallow for efficient response times. And again, scaling of an RDBMS is hardly possible. The solution are specialized\nTSDBs, based on open source NoSQL technologies, and a smart data model to overcome said deficiencies. 
### Where to go from here?

What can we do with all this data, why should we collect and keep it, and what can we learn from it? There is a variety of use cases, including machine learning techniques that classify data or detect anomalies. There are good algorithms available, and the combination of open source technologies backed by Hadoop makes the Hadoop ecosystem applicable: that means the availability of Apache Mahout for machine learning, Apache Spark for data analysis (at the moment preferable over Hive), or Apache Drill for data analytics.

## And this is how NoSQL changes society

Friedman and Dunning pointed out how modern society's Internet has reversed the flow of data: instead of demanding data from servers, applications now often push (time series) data into databases. NoSQL and open source technologies allow for the analysis of these data, hence enabling society to take advantage of all the data gathered. Maury gave a good example, and his approach hopefully becomes a widely accepted and widespread way to gather and share data for data analytics. An example is the [Aadhaar project](http://en.wikipedia.org/wiki/Unique_Identification_Authority_of_India) that aims for identification without regard to caste, creed, religion or geography in India to ensure better welfare services, and that runs on the NoSQL database MapR-DB. Let's use the potential and power of these technologies together for the greater good of society.

## Further readings

As this blog post can only give a brief motivation and introduction into the topic of time series databases, the interested reader who would like to get deeper insights is referred to the literature and the references therein. 😉
😉",[577,20096,20097,20105,20113,20120],{},[580,20098,20099,20104],{},[585,20100,20103],{"href":20101,"rel":20102},"https://web.archive.org/web/20150502150101/http://info.mapr.com:80/resources-ebook-Time-Series-Databases.html",[589],"Time series databases","\nby Ted Dunning and Ellen Friedman",[580,20106,20107,20112],{},[585,20108,20111],{"href":20109,"rel":20110},"http://shop.oreilly.com/product/0636920034650.do",[589],"Practical Machine Learning: A New Look at Anomaly Detection"," by Ted\nDunning and Ellen Friedman",[580,20114,20115,20112],{},[585,20116,20119],{"href":20117,"rel":20118},"https://www.mapr.com/practical-machine-learning",[589],"Practical Machine Learning: Innovations in Recommendation",[580,20121,20122,20123,20128],{},"The talks by Friedman and Dunning on the topic given at ",[585,20124,20127],{"href":20125,"rel":20126},"https://2014.nosql-matters.org/bcn/abstracts/",[589],"NoSQL matters","\nin Barcelona",[18,20130,20131],{},"Or just download some software and try it out yourself:",[577,20133,20134,20139],{},[580,20135,20136],{},[585,20137,20066],{"href":20064,"rel":20138},[589],[580,20140,20141],{},[585,20142,20145],{"href":20143,"rel":20144},"https://github.com/mapr-demos/opentsdb",[589],"Open source MapR extensions",{"title":48,"searchDepth":86,"depth":86,"links":20147},[20148,20153,20157,20158],{"id":19942,"depth":86,"text":19943,"children":20149},[20150,20151,20152],{"id":19955,"depth":126,"text":19956},{"id":19986,"depth":126,"text":19987},{"id":20008,"depth":126,"text":20009},{"id":20039,"depth":86,"text":20040,"children":20154},[20155,20156],{"id":20057,"depth":126,"text":20058},{"id":20070,"depth":126,"text":20071},{"id":20077,"depth":86,"text":20078},{"id":20090,"depth":86,"text":20091},[613],"2014-11-28T10:52:11","On 22 November 2014, the NoSQL matters conference took place in\\nBarcelona at the Casa Convalescència, which is doubtless one of\\nthe most beautiful locations for a conference! The Casa was declared a Historical Artistic Monument in 1978, and a World\\nCultural Heritage Site in 1997, and these great halls are a great place for great\\nspeakers.","https://synyx.de/blog/time-series-data-is-the-the-new-big-data/",{},"/blog/time-series-data-is-the-the-new-big-data",{"title":19892,"description":20166},"On 22 November 2014, the NoSQL matters conference took place in\nBarcelona at the Casa Convalescència, which is doubtless one of\nthe most beautiful locations for a conference! The Casa was declared a Historical Artistic Monument in 1978, and a World\nCultural Heritage Site in 1997, and these great halls are a great place for great\nspeakers.","blog/time-series-data-is-the-the-new-big-data",[20169,20170,20171,20172,20173],"big-data","data-science","nosql14","nosqlmatters","time-series-databases","On 22 November 2014, the NoSQL matters conference took place in Barcelona at the Casa Convalescència, which is doubtless one of the most beautiful locations for a conference! 
The Casa…","TjyEHNiqN5_YzqUT-k2YU7jRmPtAZH1WQKtcKhSx31s",{"id":20177,"title":20178,"author":20179,"body":20180,"category":20498,"date":20499,"description":20500,"extension":617,"link":20501,"meta":20502,"navigation":499,"path":20503,"seo":20504,"slug":20184,"stem":20505,"tags":20506,"teaser":20512,"__hash__":20513},"blog/blog/the-qt-framework-solid-fun-in-many-languages.md","The Qt framework: solid fun in many languages",[13180],{"type":11,"value":20181,"toc":20496},[20182,20185,20188,20206,20212,20215,20218,20222,20225,20233,20239,20242,20249,20252,20256,20259,20267,20273,20276,20283,20286,20289,20381,20389,20393,20401,20404,20413,20422,20431,20440,20449,20458,20467,20476,20485,20494],[14,20183,20178],{"id":20184},"the-qt-framework-solid-fun-in-many-languages",[18,20186,20187],{},"Particularly to people using C++ and Python the Qt framework is probably quite well-known, as in these communities\nit’s one of the most-used frameworks for application development. For those who don’t know what Qt is or what it does:\nit’s a comprehensive LGPL-licensed framework providing cross-platform support for GUI, network, multimedia, database,\nsensors, graphics (OpenGL) and many other features. In this article I would like to give a quick overview of these.",[18,20189,20190,20191,8713,20194,8713,20197,20201,20202,20205],{},"While written in C++, Qt has many language bindings",[585,20192,2546],{"href":20193},"#sdfootnote1sym",[585,20195,2409],{"href":20196},"#sdfootnote2sym",[585,20198,20200],{"href":20199},"#sdfootnote3sym","3",",\nincluding for Python, Perl, Ada, Ruby, Java, BASIC, Go, C#, PHP, Lua and Haskell. Any application written in any of\nthese languages and using the Qt framework can be deployed unmodified on any of the supported\nplatforms",[585,20203,2762],{"href":20204},"#sdfootnote4sym"," – including all major desktop and mobile platforms – which makes it a popular framework\nfor many big organizations and companies. Some well-known applications written using Qt include Autodesk Maya, Altera\nQuartus, KDE, Google Earth, Skype, Spotify (Linux), Virtualbox and VLC.",[18,20207,20208],{},[2223,20209],{"alt":20210,"src":20211},"\"qt_imagecomposer_qt-creator\"","https://media.synyx.de/uploads//2014/09/qt_imagecomposer_qt-creator.jpg",[18,20213,20214],{},"Screenshot 1: Image Composition sample application running on top of Qt Creator IDE.",[18,20216,20217],{},"In addition to the straight Qt framework there is also the Qt Modeling Language (QML) component which can be used to\nrapidly create user interface-centric applications in a JavaScript-based, declarative language. It’s commonly used for\nmobile and embedded applications. A basic QML application can be enhanced using JavaScript code and feature anything\nfrom UI controls to a complete web browser widget (using the WebKit-based module).",[14,20219,20221],{"id":20220},"getting-started","Getting started",[18,20223,20224],{},"When I started using Qt in 2010 Qt 4.7 was the standard. Since then Qt has grown into its current form at version 5.3,\nwith a strong focus on JavaScript and mobile development (using the Qt Quick module, which defines QML), while the\noriginal C++ API also got a makeover. This didn’t change any fundamentals, however, mostly improving library\norganization and features such as accessibility in GUIs.",[18,20226,20227,20228,20232],{},"To quickly build a GUI application, one can use the provided Qt Creator IDE, which includes all of the tools to make any\ntype of application, including non-Qt-based ones. 
If one wanted to, for example, create a browser using the WebKit browser engine, a single class implementation would suffice, as in Qt's Fancy Browser example [5], which goes one step further and even loads a jQuery instance into the JavaScript runtime to perform HTML manipulation.

![qt_fancybrowser](https://media.synyx.de/uploads//2014/09/qt_fancybrowser.jpg)

Screenshot 2: Fancy Browser example application.

For a hobby project I took this basic concept and made a more full-featured browser [6], writing a custom cookie handler among other extensions to the basic Qt classes. With the foundation Qt provides it's very easy to rapidly get started on a project, or to quickly prototype a concept without wasting hours on implementation details.

Whether one uses C++, Python, Ada or another language for which a complete wrapper exists, the basic principle doesn't change in implementing a Qt-based application. One always uses the same API and same concepts, just molded to fit the implementing language.

# Enter QML

Even to long-time users of C++/Qt, QML can seem quite confusing at first, mostly because of the confusion over what QML is and isn't. In essence QML (Qt Modeling Language) is the name of the modeling language: a descriptive language with which one can define user interface elements and their behavior. QML is part of Qt Quick, the UI creation kit which itself is part of the Qt framework. Finally, the runtime for QML is called Qt Declarative.

Places where QML is used include (outside of mobile/embedded) KDE and the Unity UI (as of version 8 [7]) which is used by Ubuntu. The main motivations behind the use of a QML-based UI seem to revolve around its language- and platform-agnostic nature. All one needs is the QML runtime, to which one can add JavaScript and C++ code for further functionality. Unity 8 uses QML to ease cross-platform deployment across desktop and mobile devices (running Ubuntu Touch).

![Qt PhotoViewer sample](https://media.synyx.de/uploads//2014/09/qt_photo_viewer.jpg)

Screenshot 3: Photo Viewer example. QML with minimal JavaScript.

The Photo Viewer QML example application [8] on the Qt site is a good example of how much one can do with just QML: this application allows one to define all views of the application with transitions, widgets and the XML-based model which retrieves image URLs from the Flickr public API. The JavaScript file is just used for some minor utility functions.

In theory one could extend the JavaScript side to include more or additional logic, and use a C++ extension for image processing or similar functionality. Where one puts the logic and which features are included would be determined by the available resources and intended languages. One can also use QML with just C++, or pure QML with no additional languages. Many QML applications can be readily deployed on a mobile device as well.
QML isn't just about static content either. Using Qt's multimedia features one can, for example, quickly set up a video player:

```qml
import QtQuick 2.0
import QtMultimedia 5.0

Video {
    id: video
    width: 800
    height: 600
    source: "video.avi"

    MouseArea {
        anchors.fill: parent
        onClicked: {
            video.play()
        }
    }

    focus: true
    Keys.onSpacePressed: video.playbackState == MediaPlayer.PlayingState ? video.pause() : video.play()
    Keys.onLeftPressed: video.seek(video.position - 5000)
    Keys.onRightPressed: video.seek(video.position + 5000)
}
```

This sample, taken from the Qt Video QML type documentation [9], shows just how easy it is to set up a responsive user interface with QML and to add elements which not only respond to user inputs, but can use video and audio as well.

# Wrapping up

This article has barely scratched the surface of what Qt is capable of. The multi-threading, networking, multimedia, graphics acceleration, storage-related and many other features are at least as interesting. Using the many sample applications on the Qt site [10] it's easy to get an idea of the possibilities, however. Simply download the current version of the libraries together with Qt Creator and browse through the examples in the Welcome tab of the IDE, or check them out online.
Finally, if anyone reading this has experience with any of the language wrappers for Qt, please leave a comment. I'd be very interested in hearing how well they work.

* [1] http://qt-project.org/wiki/Category:LanguageBindings
* [2] http://en.wikipedia.org/wiki/List_of_language_bindings_for_Qt_4
* [3] http://en.wikipedia.org/wiki/List_of_language_bindings_for_Qt_5
* [4] http://qt-project.org/doc/qt-5/supported-platforms.html
* [5] http://qt-project.org/doc/qt-5/qtwebkitexamples-webkitwidgets-fancybrowser-example.html
* [6] http://mayaposch.com/wildfox.php
* [7] https://unity.ubuntu.com/getinvolved/development/unity8/
* [8] http://qt-project.org/doc/qt-5/qtquick-demos-photoviewer-example.html
* [9] http://qt-project.org/doc/qt-5/qml-qtmultimedia-video.html
* [10] http://qt-project.org/

---
For those who…","pEQ_OlddqNbA7CWP-ltLaMJp7EGQfBl6i6zBkOADnbs",{"id":20515,"title":20516,"author":20517,"body":20518,"category":21463,"date":21464,"description":20525,"extension":617,"link":21465,"meta":21466,"navigation":499,"path":21467,"seo":21468,"slug":21469,"stem":21470,"tags":21471,"teaser":21475,"__hash__":21476},"blog/blog/spock-test-well-and-prosper-from-the-unit-up-to-the-integration-level.md","Spock: Testing from the Unit up to the Integration Level",[13434],{"type":11,"value":20519,"toc":21461},[20520,20523,20526,20540,20553,20556,20559,20562,20565,20570,20593,20598,20691,20696,20762,20765,20770,20817,20820,20825,20957,20960,20975,20978,20983,21109,21114,21188,21209,21215,21222,21226,21240,21246,21250,21325,21330,21420,21431,21434,21437,21444,21451,21459],[14,20521,20516],{"id":20522},"spock-testing-from-the-unit-up-to-the-integration-level",[18,20524,20525],{},"There are a number of reasons to use the Spock testing framework:",[18,20527,20528,20529,20532,20533,11792,20536,20539],{},"First, tests – ",[573,20530,20531],{},"specifications"," in Spock speak – written in Spock are well structured, expressive and therefore provide\ngood readability. In addition, Spock has built-in features like ",[573,20534,20535],{},"data driven testing",[573,20537,20538],{},"interaction based testing"," (\nmocking). Data driven testing allows your test code to be reused, i.e. to be applied multiple times with different\nparameters.",[18,20541,20542,20543,20546,20547,20552],{},"Second, because Spock is a Groovy based DSL, specification code can become concise where the equivalent Java code would\nbe overly verbose. For example, Groovy provides native syntax for maps, lists and regular expressions. ",[573,20544,20545],{},"Closure\ncoercion"," helps providing stub implementations for one or more interface methods without having to write a stub class.\nAs Groovy and Java can freely be mixed together you can use any Java based library you like, or use Groovy based\nlibraries. For example\nthe ",[585,20548,20551],{"href":20549,"rel":20550},"https://web.archive.org/web/20150313003201/http://groovy.codehaus.org:80/modules/http-builder/home.html",[589],"HTTPBuilder","\nenhances the HttpComponents HttpClient by providing features like various builders & parsers and a streamlined REST\nclient.",[18,20554,20555],{},"Also the Spring framework supports Groovy and – not surprisingly – Spring TestContext framework works well with Spock:\napplication contexts can easily be made available to specifications via annotation, thus enabling integration testing at\nall levels.",[18,20557,20558],{},"Spock specifications can be run from an IDE just like normal JUnit tests and, last but not least, implementing them is a\ngreat opportunity to learn the Groovy language.",[18,20560,20561],{},"For demonstration purposes we’ll create a very simple Spring Boot web application that responds with string “prime” or\n“not prime” dependant on a number given by a request parameter. In case of errors the string “error” should be sent back\nto the client. 
Also, the Spring framework supports Groovy, and – not surprisingly – the Spring TestContext framework works well with Spock: application contexts can easily be made available to specifications via annotation, thus enabling integration testing at all levels.

Spock specifications can be run from an IDE just like normal JUnit tests and, last but not least, implementing them is a great opportunity to learn the Groovy language.

For demonstration purposes we'll create a very simple Spring Boot web application that responds with the string "prime" or "not prime" depending on a number given by a request parameter. In case of errors the string "error" should be sent back to the client. Then we'll create Spock specifications, both for unit and integration testing.

We start by defining a service interface, its implementation and a controller class:

`src/main/java/prime/service/PrimeService.java`

```java
public interface PrimeService {
    boolean isPrime(int number);
}
```

`src/main/java/prime/service/PrimeServiceImpl.java`

```java
@Service
public class PrimeServiceImpl implements PrimeService {
    @Override
    public boolean isPrime(int number) {
        if (number < 0) {
            throw new IllegalArgumentException("argument must not be negative");
        }
        if (number <= 2) {
            return number == 2;
        }
        for (int i = 2; i < Math.sqrt(number) + 1; i++) {
            if (number % i == 0) {
                return false;
            }
        }
        return true;
    }
}
```
`src/main/groovy/prime/web/PrimeController.groovy`

```groovy
@RestController
class PrimeController {

    @Autowired PrimeService primeService

    @ExceptionHandler(Exception)
    String handleError() {
        'error'
    }

    @RequestMapping('/prime')
    String isPrime(@RequestParam int n) {
        primeService.isPrime(n) ? 'prime' : 'not prime'
    }
}
```
Since the application is based on Spring Boot we also add this class...

`src/main/java/prime/Application.java`

```java
@Configuration
@EnableAutoConfiguration
@ComponentScan
public class Application {
    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
```

... and a build script:

`src/build.gradle`

```groovy
buildscript {
    repositories {
        mavenCentral()
    }
    dependencies {
        classpath("org.springframework.boot:spring-boot-gradle-plugin:1.1.6.RELEASE")
    }
}

apply plugin: 'groovy'
apply plugin: 'spring-boot'

jar {
    baseName = 'prime'
    version = '0.10.0'
}

repositories {
    mavenCentral()
}

dependencies {
    compile("org.codehaus.groovy:groovy-all:2.3.6")
    compile("org.springframework.boot:spring-boot-starter-jetty")
    compile("org.springframework.boot:spring-boot-starter-web") {
        exclude module: "spring-boot-starter-tomcat"
    }
    testCompile("org.springframework.boot:spring-boot-starter-test")
    testCompile("org.spockframework:spock-core:0.7-groovy-2.0")
}
```
{\n",[53,20887,20888],{"class":55,"line":515},[53,20889,20890],{}," baseName = 'prime'\n",[53,20892,20893],{"class":55,"line":521},[53,20894,20895],{}," version = '0.10.0'\n",[53,20897,20898],{"class":55,"line":527},[53,20899,282],{},[53,20901,20902],{"class":55,"line":533},[53,20903,20904],{},"repositories {\n",[53,20906,20907],{"class":55,"line":539},[53,20908,20909],{}," mavenCentral()\n",[53,20911,20912],{"class":55,"line":545},[53,20913,282],{},[53,20915,20916],{"class":55,"line":2414},[53,20917,20918],{},"dependencies {\n",[53,20920,20921],{"class":55,"line":2426},[53,20922,20923],{}," compile(\"org.codehaus.groovy:groovy-all:2.3.6\")\n",[53,20925,20926],{"class":55,"line":2438},[53,20927,20928],{}," compile(\"org.springframework.boot:spring-boot-starter-jetty\")\n",[53,20930,20931],{"class":55,"line":2451},[53,20932,20933],{}," compile(\"org.springframework.boot:spring-boot-starter-web\") {\n",[53,20935,20936],{"class":55,"line":2459},[53,20937,20938],{}," exclude module: \"spring-boot-starter-tomcat\"\n",[53,20940,20941],{"class":55,"line":2470},[53,20942,7384],{},[53,20944,20945],{"class":55,"line":2476},[53,20946,20947],{}," testCompile(\"org.springframework.boot:spring-boot-starter-test\")\n",[53,20949,20950],{"class":55,"line":2484},[53,20951,20952],{}," testCompile(\"org.spockframework:spock-core:0.7-groovy-2.0\")\n",[53,20954,20955],{"class":55,"line":2490},[53,20956,282],{},[18,20958,20959],{},"The Groovy plugin handles mixed Groovy and Java code in the project. Not only is our controller class written in Groovy;\nthe specifications for unit and integration testing will be too.",[18,20961,20962,20963,20966,20967,20970,20971,20974],{},"If Groovy is used in production code we have to include the ",[27,20964,20965],{},"groovy-all"," dependency to the ",[50,20968,20969],{},"compile"," configuration,\notherwise this dependency should be added to the ",[50,20972,20973],{},"testCompile"," configuration.",[18,20976,20977],{},"Now we write unit specifications which verify the correctness of service implementation and controller:",[18,20979,20980],{},[50,20981,20982],{},"src/test/groovy/prime/service/PrimeServiceImplSpec.groovy",[43,20984,20986],{"className":288,"code":20985,"language":290,"meta":48,"style":48},"\nclass PrimeServiceImplSpec extends Specification {\n PrimeServiceImpl sut = new PrimeServiceImpl();\n def \"test if the given number is prime\"() {\n expect:\n sut.isPrime(n) == prime\n where:\n n | prime\n 0 | false\n 1 | false\n 2 | true\n 3 | true\n 4 | false\n 5 | true\n 6 | false\n 7 | true\n }\n def \"check method argument constraints\"() {\n when:\n sut.isPrime(-1)\n then:\n def e = thrown(IllegalArgumentException)\n e.message == 'argument must not be negative'\n }\n}\n\n",[50,20987,20988,20992,20997,21002,21007,21012,21017,21022,21027,21032,21037,21042,21047,21052,21057,21062,21067,21071,21076,21081,21086,21091,21096,21101,21105],{"__ignoreMap":48},[53,20989,20990],{"class":55,"line":56},[53,20991,500],{"emptyLinePlaceholder":499},[53,20993,20994],{"class":55,"line":86},[53,20995,20996],{},"class PrimeServiceImplSpec extends Specification {\n",[53,20998,20999],{"class":55,"line":126},[53,21000,21001],{}," PrimeServiceImpl sut = new PrimeServiceImpl();\n",[53,21003,21004],{"class":55,"line":163},[53,21005,21006],{}," def \"test if the given number is prime\"() {\n",[53,21008,21009],{"class":55,"line":186},[53,21010,21011],{}," expect:\n",[53,21013,21014],{"class":55,"line":221},[53,21015,21016],{}," sut.isPrime(n) == 
prime\n",[53,21018,21019],{"class":55,"line":242},[53,21020,21021],{}," where:\n",[53,21023,21024],{"class":55,"line":273},[53,21025,21026],{}," n | prime\n",[53,21028,21029],{"class":55,"line":279},[53,21030,21031],{}," 0 | false\n",[53,21033,21034],{"class":55,"line":496},[53,21035,21036],{}," 1 | false\n",[53,21038,21039],{"class":55,"line":503},[53,21040,21041],{}," 2 | true\n",[53,21043,21044],{"class":55,"line":509},[53,21045,21046],{}," 3 | true\n",[53,21048,21049],{"class":55,"line":515},[53,21050,21051],{}," 4 | false\n",[53,21053,21054],{"class":55,"line":521},[53,21055,21056],{}," 5 | true\n",[53,21058,21059],{"class":55,"line":527},[53,21060,21061],{}," 6 | false\n",[53,21063,21064],{"class":55,"line":533},[53,21065,21066],{}," 7 | true\n",[53,21068,21069],{"class":55,"line":539},[53,21070,860],{},[53,21072,21073],{"class":55,"line":545},[53,21074,21075],{}," def \"check method argument constraints\"() {\n",[53,21077,21078],{"class":55,"line":2414},[53,21079,21080],{}," when:\n",[53,21082,21083],{"class":55,"line":2426},[53,21084,21085],{}," sut.isPrime(-1)\n",[53,21087,21088],{"class":55,"line":2438},[53,21089,21090],{}," then:\n",[53,21092,21093],{"class":55,"line":2451},[53,21094,21095],{}," def e = thrown(IllegalArgumentException)\n",[53,21097,21098],{"class":55,"line":2459},[53,21099,21100],{}," e.message == 'argument must not be negative'\n",[53,21102,21103],{"class":55,"line":2470},[53,21104,860],{},[53,21106,21107],{"class":55,"line":2476},[53,21108,282],{},[18,21110,21111],{},[50,21112,21113],{},"src/test/groovy/prime/web/PrimeControllerSpec.groovy",[43,21115,21117],{"className":288,"code":21116,"language":290,"meta":48,"style":48},"\nclass PrimeControllerSpec extends Specification {\n def \"returns string 'prime' when service detects prime number\"() {\n int p = 3\n def stub = { it == p ? true : false }\n expect:\n new PrimeController(primeService: stub).isPrime(p) == 'prime'\n }\n def \"returns 'not prime' when service detects non-prime number\"() {\n int n = 4\n def stub = { it == n ? false : true }\n expect:\n new PrimeController(primeService: stub).isPrime(n) == 'not prime'\n }\n}\n\n",[50,21118,21119,21123,21128,21133,21138,21143,21147,21152,21156,21161,21166,21171,21175,21180,21184],{"__ignoreMap":48},[53,21120,21121],{"class":55,"line":56},[53,21122,500],{"emptyLinePlaceholder":499},[53,21124,21125],{"class":55,"line":86},[53,21126,21127],{},"class PrimeControllerSpec extends Specification {\n",[53,21129,21130],{"class":55,"line":126},[53,21131,21132],{}," def \"returns string 'prime' when service detects prime number\"() {\n",[53,21134,21135],{"class":55,"line":163},[53,21136,21137],{}," int p = 3\n",[53,21139,21140],{"class":55,"line":186},[53,21141,21142],{}," def stub = { it == p ? true : false }\n",[53,21144,21145],{"class":55,"line":221},[53,21146,21011],{},[53,21148,21149],{"class":55,"line":242},[53,21150,21151],{}," new PrimeController(primeService: stub).isPrime(p) == 'prime'\n",[53,21153,21154],{"class":55,"line":273},[53,21155,860],{},[53,21157,21158],{"class":55,"line":279},[53,21159,21160],{}," def \"returns 'not prime' when service detects non-prime number\"() {\n",[53,21162,21163],{"class":55,"line":496},[53,21164,21165],{}," int n = 4\n",[53,21167,21168],{"class":55,"line":503},[53,21169,21170],{}," def stub = { it == n ? 
Spock also provides a means for *interaction based testing*, i.e. mocking and stubbing.
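As a small, purely illustrative sketch (not from the original article), the controller interaction could also be verified with a Spock mock instead of a coerced closure; the mock both stubs the return value and asserts that the service is called exactly once:

```groovy
class PrimeControllerInteractionSpec extends Specification {

    def "controller asks the service exactly once"() {
        given:
        def primeService = Mock(PrimeService)
        def controller = new PrimeController(primeService: primeService)

        when:
        def answer = controller.isPrime(7)

        then:
        1 * primeService.isPrime(7) >> true   // expected interaction plus stubbed response
        answer == 'prime'
    }
}
```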
Before we implement an integration specification to verify the application's behaviour, we have to add another dependency in the build script. The **spock-spring** dependency enables us to use the Spring TestContext framework together with Spock, which is required for our integration specification.

`src/build.gradle`

```groovy
testCompile("org.spockframework:spock-spring:0.7-groovy-2.0")
```

In order to separate the long running integration specifications from the unit specifications, we modify the build script by defining a corresponding source set, associated configurations and a task. Integration testing can now be triggered with `gradle integTest`.

`src/build.gradle`

```groovy
sourceSets {
    integTest {
        compileClasspath += main.output + test.output
        runtimeClasspath += main.output + test.output
    }
}

configurations {
    integTestCompile.extendsFrom testCompile
    integTestRuntime.extendsFrom testRuntime
}

task integTest(type: Test) {
    testClassesDir = sourceSets.integTest.output.classesDir
    classpath = sourceSets.integTest.runtimeClasspath
}
```

`src/integTest/groovy/prime/PrimeSpec.groovy`

```groovy
@ContextConfiguration(loader = SpringApplicationContextLoader, classes = Application)
@WebAppConfiguration
@IntegrationTest
class PrimeSpec extends Specification {

    @Value('${local.server.port}')
    int port

    def "server answers with 'prime' or 'not prime' or 'error'"() {
        expect:
        "http://localhost:$port/prime?n=$n".toURL().text == response

        where:
        n  | response
        23 | 'prime'
        42 | 'not prime'
        -1 | 'error'
    }
}
```
\"server answers with 'prime' or 'not prime' or 'error'\"() {\n",[53,21375,21376],{"class":55,"line":279},[53,21377,21011],{},[53,21379,21380],{"class":55,"line":496},[53,21381,21382],{}," \"http://localhost:$port/prime?n=$n\"\n",[53,21384,21385],{"class":55,"line":503},[53,21386,21387],{}," .toURL().text == response\n",[53,21389,21390],{"class":55,"line":509},[53,21391,21021],{},[53,21393,21394],{"class":55,"line":515},[53,21395,21396],{}," n | response\n",[53,21398,21399],{"class":55,"line":521},[53,21400,21401],{}," 23 | 'prime'\n",[53,21403,21404],{"class":55,"line":527},[53,21405,21406],{}," 42 | 'not prime'\n",[53,21408,21409],{"class":55,"line":533},[53,21410,21411],{}," -1 | 'error'\n",[53,21413,21414],{"class":55,"line":539},[53,21415,860],{},[53,21417,21418],{"class":55,"line":545},[53,21419,282],{},[18,21421,21422,21423,21426,21427,21430],{},"In ",[50,21424,21425],{},"PrimeSpec"," the Spring Boot annotation ",[50,21428,21429],{},"@IntegrationTest"," causes the embedded application server to start. As an\nalternative we could use MockMvc to verify application response. Integration testing with MockMvc doesn't require a\nrunning application server.",[18,21432,21433],{},"To summarize, the Spock testing famework is a good example how Groovy can help Java developers. By writing Spock\nspecifications, your test code - whether on the unit oder the integration level - can become concise and expressive.\nIntegration into the build process is easy and your favorite IDE will handle specifications just like regular JUnit\ntests.",[18,21435,21436],{},"Links:",[18,21438,21439],{},[585,21440,21443],{"href":21441,"rel":21442},"https://code.google.com/p/spock/wiki/SpockBasics/",[589],"SpockBasics - Anatomy of a Spock specification",[18,21445,21446],{},[585,21447,21450],{"href":21448,"rel":21449},"http://spock-framework.readthedocs.org/en/latest/",[589],"Spock Framework Reference Documentation",[18,21452,21453,21458],{},[585,21454,21457],{"href":21455,"rel":21456},"http://de.slideshare.net/kousen/spock-friendly-testing",[589],"Spock: Test Well and Prosper"," by Ken Kousen",[607,21460,989],{},{"title":48,"searchDepth":86,"depth":86,"links":21462},[],[613],"2014-09-15T20:19:31","https://synyx.de/blog/spock-test-well-and-prosper-from-the-unit-up-to-the-integration-level/",{},"/blog/spock-test-well-and-prosper-from-the-unit-up-to-the-integration-level",{"title":20516,"description":20525},"spock-test-well-and-prosper-from-the-unit-up-to-the-integration-level","blog/spock-test-well-and-prosper-from-the-unit-up-to-the-integration-level",[21472,21473,1010,21474],"groovy","spock","test","There are a number of reasons to use the Spock testing framework: First, tests – specifications in Spock speak – written in Spock are well structured, expressive and therefore provide…","PUbBjgfCyqC5t0kqqAl71ChX_YmSeT1Y0stRAuBBZHo",{"id":21478,"title":21479,"author":21480,"body":21481,"category":21584,"date":21585,"description":48,"extension":617,"link":21586,"meta":21587,"navigation":499,"path":21588,"seo":21589,"slug":21485,"stem":21590,"tags":21591,"teaser":21597,"__hash__":21598},"blog/blog/code-reviews.md","Code-Reviews",[12981],{"type":11,"value":21482,"toc":21579},[21483,21486,21492,21501,21504,21508,21511,21522,21532,21541,21544,21547,21551,21554,21557,21566,21576],[14,21484,21479],{"id":21485},"code-reviews",[649,21487,21489],{"id":21488},"zeig-mir-deinen-code-und-ich-sage-dir-wer-du-bist",[573,21490,21491],{},"Zeig’ mir Deinen Code und ich sage Dir wer Du bist.",[18,21493,21494,21495,986],{},"Oftmals kommen 
To summarize, the Spock testing framework is a good example of how Groovy can help Java developers. By writing Spock specifications, your test code – whether on the unit or the integration level – can become concise and expressive. Integration into the build process is easy and your favorite IDE will handle specifications just like regular JUnit tests.

Links:

* [SpockBasics – Anatomy of a Spock specification](https://code.google.com/p/spock/wiki/SpockBasics/)
* [Spock Framework Reference Documentation](http://spock-framework.readthedocs.org/en/latest/)
* [Spock: Test Well and Prosper](http://de.slideshare.net/kousen/spock-friendly-testing) by Ken Kousen

---

# Code-Reviews

### *Show me your code and I will tell you who you are.*

Companies often approach us asking for a code review. There are many reasons for this, but it usually comes down to software that is hard to extend and maintain and, as a consequence, to slow development speed (higher costs). These applications frequently become new patients for the [synyx code clinic](http://www.synyx.de/leistungen/code_clinic/).

In our view, these problems can often be tackled bottom-up, starting from the code. A first step is an external code review. A (not necessarily contradictory) alternative would be a related and more comprehensive architecture review, but that is a story for another day.

### Approach

When we start a code review with a customer, the first step – as with almost all of our consulting services – is an in-depth conversation with the client. Central questions, requirements and goals are clarified:

* "Where does it hurt the most?"
* "What are your expectations for the review?"
* "Which important stakeholders are involved and what are the constraints?"

Usually, further interviews and conversations follow. Even though code often speaks for itself, members of the development team and the people around them can provide important hints and entry points for the review. In line with our [vision and mission](http://www.synyx.de/unternehmen/vision_mission/) we also prefer an open and transparent approach over pure "undercover operations".

Once all the necessary conversations have taken place, we retreat with the project's source code and analyse it. Suitable entry points often emerge during the preliminary talks. In addition, tool-supported analyses (for example with [SonarQube](http://www.sonarqube.org/)) provide useful starting points. Last but not least, over time one learns to spot problems and weaknesses in source code quickly.

Depending on the size of the project, the findings are deepened and corroborated in parallel through follow-up questions to the team, and initially identified problems are confirmed or refuted.

At the end of the code review the results are summarized and presented. Depending on the customer's wishes this can happen in person (e.g. in workshops), in writing through an elaborated review document, or in other ways. Typically the identified topics are described, assessed and accompanied by suggestions for improvement and possible solutions. It goes without saying that, on request, we also support our customers in implementing these solutions.
Es versteht sich von selbst, dass wir unsere Kunden auf Wunsch auch bei der Umsetzung dieser\nLösungen unterstützen.",[649,21548,21550],{"id":21549},"ergebnisse","Ergebnisse",[18,21552,21553],{},"Auch wenn jedes Projekt, jedes Team und jeder Code anders ist, existieren oft sehr ähnliche Probleme.",[18,21555,21556],{},"So treffen wir beispielsweise regelmäßig auf einen inkonsistenten Mix verschiedenster Entwicklungsstile, Formatierungen\nund Bezeichnungen. Dies führt zu schlechter Lesbarkeit und einer unübersichtlichen Codebasis. Neue Entwickler finden\nsich dort nur schwer zurecht und auch auf die Geschwindigkeit von bestehenden Entwicklern hat dies erheblichen Einfluss.",[18,21558,21559,21560,21565],{},"Ein anderes, häufiges Problem ist die schlechte Struktur und Testbarkeit des bestehenden Codes. Diese resultiert oft aus\nder Verletzung verschiedener, grundlegender Prinzipien der Softwareentwicklung (siehe\nauch ",[585,21561,21564],{"href":21562,"rel":21563},"http://butunclebob.com/ArticleS.UncleBob.PrinciplesOfOod",[589],"SOLID",") und führt letztlich zu monolithischen und\naufwändig wartbaren Anwendungen.",[18,21567,21568,21569,21575],{},"Auch die erarbeiteten Lösungsstrategien haben oftmals Gemeinsamkeiten. So können häufig Tools helfen: Eine\nautomatisierte Toolchain von der Formatierung, über alle Arten\nvon ",[585,21570,21574],{"href":21571,"rel":21572,"title":21573},"http://martinfowler.com/bliki/TestPyramid.html",[589],"Test Pyramide","Testing"," und Analysen bis hin zum Deployment in\nProduktion kann hier große Wirkung zeigen. Andere Maßnahmen können beispielsweise intensive Workshops oder Schulungen\ndes Teams mit Fokus auf die beim Review identifizierten Aspekte sein.",[18,21577,21578],{},"Code-Reviews sind nicht nur im Nachgang bei Problemen nützlich sondern können bereits vorher zur Überprüfung der\nCode-Qualität als regelmäßiges Quality-Gate zum Einsatz kommen. Wir verwenden dieses Qualitätssicherungsinstrument\nselbst in unseren Projekten und bei unseren Teams. Meist übernimmt ein erfahrener Kollege aus einem anderen Team den\nReview. Dies ist eine Methode, wie wir die Code-Qualität in unseren eigenen Projekten optimieren und stellt eine\nwirkungsvolle Ergänzung von Team-internen Reviews dar.",{"title":48,"searchDepth":86,"depth":86,"links":21580},[21581,21582,21583],{"id":21488,"depth":126,"text":21491},{"id":21506,"depth":126,"text":21507},{"id":21549,"depth":126,"text":21550},[613],"2014-08-22T10:30:06","https://synyx.de/blog/code-reviews/",{},"/blog/code-reviews",{"title":21479,"description":48},"blog/code-reviews",[50,21592,21593,21594,21595,21596,21474],"code-clinic","code-qualitat","review","solid","sonarqube","Zeig’ mir Deinen Code und ich sage Dir wer Du bist. Oftmals kommen Unternehmen mit der Bitte um einen Code-Review auf uns zu. Gründe dafür gibt es viele, jedoch dreht…","Qsyk1Z8Y7U5TchROIcmFGONn1yQ2F5yLeUoyxJr7GHc",{"id":21600,"title":21601,"author":21602,"body":21603,"category":21717,"date":21718,"description":21612,"extension":617,"link":21719,"meta":21720,"navigation":499,"path":21721,"seo":21722,"slug":21607,"stem":21723,"tags":21724,"teaser":21730,"__hash__":21731},"blog/blog/when-your-tooling-is-fooling-you-code-review-and-continuous-integration-with-gerrit-jenkins-done-right.md","When your tooling is fooling you. 
# When your tooling is fooling you. Code review and continuous integration with Gerrit & Jenkins done right.

*tl;dr: When you are using Gerrit and Jenkins on the same machine, know what you're doing!*

In a recent project we decided to increase code quality by introducing Gerrit as a code review tool.

The configuration looks as follows:

Next to a colleague who reviews the patchset, we created a dedicated Jenkins job which verifies the patchset by building the project with the usual Maven build configuration "mvn clean install" on the same machine. Only when both the reviewer and the CI server accept the patchset is it merged into our Git repository.

After a successful merge of the patchset another Jenkins job is triggered for deployment purposes.

That job is, not surprisingly, configured with "mvn clean install -U".

Meaning: Jenkins cleans up the working directory and builds the project using the newest snapshots and/or releases.

Recently we encountered a problem with our setup. Surprisingly, projects failed to build with the unexpected reason of incorrect usage of code in an artifact which in the meantime hadn't been changed. There had been changesets in Gerrit, but since they hadn't been reviewed and merged yet, they should not be in the artifact used by other projects.

**So what's going on right here?**

Analysing the setup we came across the usage of the "-U" parameter of Maven. The manual says:

`-U,--update-snapshots Forces a check for updated releases and snapshots on remote repositories`

At first glance it seems to be what we want our Jenkins job to do: checking for the newest dependencies before building a project and deploying it into our repository. But in combination with the Gerrit Jenkins job running on the same server, which verifies every patchset pushed to Gerrit, we introduced an epic flaw.

The install plugin of Maven puts every built artifact into the local repository, which by definition is of course the newest artifact you can get. So every project using this dependency will take that artifact, even when configured with the "-U" parameter, which only checks whether the artifact in the remote repository is newer. The attentive reader knows why it is not.

**So what's the solution?**

There are three possibilities to overcome the flaw:

Of course you may use **dedicated servers** for both Jenkins and Gerrit. Not sharing the local repository avoids getting into trouble with artifacts which are temporary and not ready for public use.
Not only the costs, but also the higher administrative effort might be a reason to look for other solutions.

Maven ships the goal [dependency:purge-local-repository](http://maven.apache.org/plugins/maven-dependency-plugin/examples/purging-local-repository.html) within the maven-dependency-plugin, allowing you to remove all dependencies from the local Maven repository. Configured in the process-sources phase it would solve the problem in our case. That solution kind of protects your project from using dirty artifacts.

However, this unfortunately removes the symptoms, but not the cause.

There is another solution which is easier than you might think. Just configure the Gerrit Jenkins job with "mvn clean **package**". This is what we actually want that job to do: it verifies the patchset by building the project without putting that temporary and half-baked version of the artifact into the local repository.

Don't forget to initially clean up the local repository if you switch from 'install' to 'package', as there still might be an unwanted version of the artifact.

**Conclusion**

Let me point out the conclusion in three simple bullet points:

* Know your artifact lifecycle and its relevance as a dependency
* Be careful with different tools running on the same machine sharing resources
* Use Gerrit! Despite our configuration fail, it has definitely increased our code quality and spread knowledge of the codebase within our team

Did you have similar problems with that setup? Or other solutions? Don't hesitate to share your experience in the comments.

---
# A very brief history of the NoSQL development

*From Codd to Brewer and beyond*

I am still new to the movement that is now called **NoSQL**, and therefore curiously following all the discussions around the CAP theorem, consistency levels like BASE, the immolation of several letters in the ACID paradigm and the 'demonization' of the relational join operation. I wondered why long-established techniques and paradigms might no longer be valid and attended some of the **NoSQL matters** conferences. These conferences are still small and very communicative, and I enjoyed them a lot! Last year in Barcelona a great, inspiring talk on the development of NoSQL was given by Doug Turnbull (@softwaredoug), who is a historian as well as a computer scientist. He discussed a lot of interesting points, and to some of these I will refer here, too. What is better suited to understanding a new topic than writing a review of its history? As this would be too time-consuming a task, I will write down a very brief history of the events (as far as I know about them) related to the NoSQL development, as well as some of my very own thoughts and impressions on this topic. There are still a lot of questions troubling my mind…

## A short story of three famous papers

There is a lot of research going on in the field of **NoSQL**. Three papers are frequently cited in this context: Codd's work on **large shared data banks** [1], the fundamental work that describes the foundation of relational database systems and the relational algebra as its query language. The **proof of Brewer's conjecture** by Gilbert and Lynch [2], who show that, within a reasonable network model and given some premises, CAP is a *pick two out of three* choice. And the criticism of a **one size fits all** philosophy by Stonebraker and Çetintemel [3], a paper stating, more or less, the end of relational databases as *the jack-of-all-trades devices* for storage-related problems.

## Codd's criticism

To understand Codd's criticism of database systems we have to go back to the 1960s, the time when data in database systems like IBM's IMS or CODASYL systems was **hierarchically ordered** (i.e. tree structured) or **arranged in networks** (i.e. graph structured). Three major issues directly affected the applications that depended on the persisted data in these systems:

1. **Ordering dependence:** The order of the records on the storage system is identical to the presentation of the records to the application. Hence, a change in this order can break the application.
2. **Indexing dependence:** If index structures are used, queries have to make explicit use of these indexes. Hence, dropping an index can invalidate a query and break the application.
3. **Access path dependence:** Data and relations between data are modelled as a tree or a network. If an application relies on these structures to access and retrieve data, it can break if the structure is unknown or changed.

## The relational data model: separation of concerns and data consistency

Codd's basic idea to address these issues was the **separation** of the data model from the data representation on the storage system. This can be achieved by modelling data and the relationships between data in terms of **mathematical relations**, structures that are, basically, sets of (ordered) **tuples**. A **database state**, in the simplest sense, is a time-varying collection of relations. To avoid anomalies, the **database schema** is **normalized**, which leads to a distribution of the elements that constitute a single object of the domain onto several relations. The **relational algebra**, a set of operations defined on relations, is a suitable **query language**, and as powerful (or expressive) as a **first-order calculus**. It produces new relations from given relations (and the algebra is in that sense **closed**). This query language solely depends on the relational data model and is completely independent of the underlying storage of records on disks and of any defined indexing structures: all information the model bears can be retrieved and deduced by the query language. Codd and Stonebraker were among the first who implemented such query languages. Today, **SQL**, a language based on the relational algebra, is the standard query language for relational database systems.

Relational database systems offer the concept of **referential integrity**, mechanisms to add **semantics** to the model by using **keys** and **foreign key relationships** to ensure **data consistency**. Concurrent access to a database is administered by using **transactions**. The famous **ACID paradigm** is part of (almost) all relational database systems and guarantees transactions to be

- **A**tomic (all operations of a transaction are executed, or none is)
- **C**onsistent (a transaction begins with a consistent state, and leaves the database in a consistent state)
- **I**solated (a transaction is executed as if it were the only transaction in the system)
- **D**urable (all changes of a transaction will be persisted to the storage system)

These features are tunable to some extent, leading to different isolation and consistency concepts. This is an important point, not only in distributed systems.

## Performance issues

There are some issues to be discussed when it comes to **performance tuning** of relational database systems: not all mathematical properties of the relational algebra (or equivalent calculi) hold in the implementations of the query languages, and despite being declarative, the **order of the operations** in an SQL statement can significantly affect the performance. Equivalent reformulations of complex queries can immensely decrease response times. The **relational join** can be quite expensive, because, due to the normalization process that distributes components of one business object over several rows or even tables, intensive I/O traffic can be produced to gather the object's components back from the storage. And even the use of indexes does not always lead to acceptable response times for ad-hoc queries, so **denormalization** is applied. This leads to data redundancy and brings back the carefully avoided anomalies. Often these issues are addressed by **vertical scaling**, i.e. by increasing memory, power or storage of the database server.

## There is no such thing as a free lunch

With the increasing popularity of the web and web businesses, the world became, naturally, more and more **distributed**. As discussed above, there can be performance issues with relational databases, and these become more significant when scaling is **horizontal**. Guaranteeing promises like ACID compliance becomes hard when scaling out, which is very popular nowadays: use more computers with less performance rather than fewer computers with higher performance. Commodity hardware is cheap and widely available. This distribution of systems comes unavoidably with the characteristics of the **CAP theorem**, stated as a conjecture by Brewer at PODC 2000 [4]: a distributed system cannot be (at the same time!) **consistent** with all its data, **available** to serve all requests and **tolerant to network partitioning**. The conjecture was formally proven by Gilbert and Lynch in 2002 within a reasonable network model [2]. Hence: there is no free lunch with distributed data [5]. In CAP, the term **consistency** of a distributed system refers to a consistent view of all nodes in the system on the data that is stored. **Availability** means that any request will get a response as long as there is no total network failure. And **partition tolerance** is about dealing with failures or the unavailability of parts of the system. As network failures are not avoidable in highly distributed systems, the **P** is often a premise, and the system can be chosen to be **AP** or **CP**. Data consistency becomes an issue, and often **BASE** (**B**asically **A**vailable, **S**oft state, **E**ventually consistent) is the chosen consistency paradigm in distributed environments: the system is guaranteed to become consistent at some time in the future, but is not guaranteed to provide a consistent view on the data at all times. That a distributed datastore can be tuned within CAP bounds (and one somehow has a continuous choice) was pointed out by Brewer [6] as well as by others.

## One size does not fit all

Over time, relational database systems with SQL as query language became the **de facto standard** wherever a storage system was needed, ranging from a simple data store backing a small web application, to giant companies' data management systems, integration databases managing data exchange between lots of applications, and data warehouses: **one technology** integrating all these totally different purposes. The task of choosing a database is often treated as a **non-functional requirement**, answered by taking the relational database system that was always used. As pointed out by Stonebraker and Çetintemel [3], there are limits to what applications a relational database system can be used for. The consequence: **one size does not fit all.** A giant hammer is not a universal tool, and choosing the correct tool for a task becomes an option again these days. Stonebraker and Çetintemel take stream processing as an example to show how a simple system fitting the application's needs can outperform a giant *universal purpose* relational database system by orders of magnitude, and state that *one size fits all* can only be maintained as a marketing illusion [3]. The paper and its sequel give several examples.

## The rise of NoSQL

At this point NoSQL comes into play, by offering datastores optimized for *special purposes*. The usual categorization classifies the NoSQL stores into one of four categories (document store, key/value store, wide column store or graph database), and places them at a specific side of the CAP triangle [3], according to the properties they offer.

## But: what exactly is NoSQL?

It is agreed that NoSQL stands for **not only SQL**. But there is still no common understanding of the concepts included in the term **NoSQL**. NoSQL stores are **often** schema-less, open source, non-relational, horizontally scalable, and use BASE as their consistency mode. The term **elasticity** is used for stores that are scalable, schema-free, and allow for rapid replication and rapid changes. But how can these features be achieved? If there is no schema, the application has to take care of the integrity of the data, as the datastore often cannot support decisions without knowledge about the structure. The design of many NoSQL datastores is bottom-up, optimized for horizontal scalability. They often provide only simple low-level APIs (like simple set, get and put operations, **sometimes** realized atomically). Modelling with NoSQL datastores **feels** totally different from modelling in the relational world, and follows a different philosophy.

## Common misunderstandings

The NoSQL world still lacks a commonly accepted terminology, and there are frequent misunderstandings: relational databases gained their name from **mathematical relations**, not from relationships between data tables implemented by foreign keys and referential integrity. The meaning of **C**, i.e. the idea of **data consistency**, differs in ACID and CAP, referring either to referential integrity or to data being the same on different nodes. A **BASE system** will be consistent at some time in the future; this is a guarantee when the system has enough time and no more updates are altering the system's state. **ACID** and NoSQL are not inherently mutually exclusive (see e.g. the graph database Neo4j [8], or the approach FoundationDB [9] has taken). And the terms **sharding and replication** are sometimes used synonymously, confusing data distribution with data redundancy. Not all of the misunderstandings mentioned here are exclusive to NoSQL, but they produce confusion in all discussions about distributed systems.

## A repetition of history?

Does the situation we are facing today resemble the one Codd faced when he wrote about the relational data model? There are clearly some similarities. But is it fair to compare the situation nowadays to the one of the 1960s? Nonetheless, I will put forward two (arbitrarily chosen) examples for discussion.

## The graph database Neo4j

**Neo4j**, developed and maintained by Neotechnology [8], is the most famous **graph database** to date. Neo4j is on the **CA** side of the CAP triangle [7]. The data model is well-suited for a wide range of applications, ranging from recommendation systems to underground train timetables, and it is used by many big companies. Neo4j is, undoubtedly, a great NoSQL database: easy to set up and to use, lots of impressive application examples, and completely open source with a helpful community. I myself like working with that database a lot. Neo4j comes with a query language called **Cypher** that can be used intuitively and seems to be quite powerful. Let's have a look at a typical Cypher statement before Neo4j version 2.0:

```
START movie = node:Movies("title:Matrix")
MATCH movie<-[:ACTS_IN]-actor
RETURN actor.name;
```

The first line of the statement reveals a direct dependency on an index called *Movies* (compare Codd's issue no. 2 above). Since version 2.0 of Neo4j this issue has been resolved. But how **powerful** and **expressive** is Cypher, say, compared to a query language like the relational algebra (or some version of Datalog, or whatever language you have always used and know to its bones)? Can you express everything you always wanted to know about your graph model using Cypher? As far as I know there is no obvious algebra underlying Cypher (some concept comparable to a **path algebra** as proposed by Neubauer and Rodriguez [10]) that would make Cypher easily accessible for a formal analysis. And the last resort for querying a graph in Neo4j, the core API, is inherently **imperative**, so it depends on access paths and graph traversal strategies.

## The wide column store Cassandra

**Apache Cassandra** [11] is a famous **wide column store**, suited to hold tons of data, and resides on the AP side of the CAP triangle [7]. Cassandra can be easily distributed, is highly available and provides no single point of failure, with tunable consistency. Cassandra is widely used in big companies for data analysis. Data modelling in Cassandra follows different objectives than data modelling against a relational database: data does not need to be flat, which is actually a very nice property. The data that is required to answer a query against a Cassandra data model must reside in a **single column family**, and hence referential integrity is considered a non-issue here. The data modelling methodology is equally **driven by queries and data**; data duplication is wanted here, whereas data duplication in relational database systems leads to unwanted anomalies. In contrast to relational database systems, transactions are not supported by Cassandra. So we deal with completely different approaches, and in Cassandra some issues that are carefully treated and avoided for the purpose of data consistency in relational systems are purposely ignored to achieve a **different goal**. In addition, Cassandra ships with the Cassandra Query Language **CQL**, which is in many ways similar to SQL:

```sql
SELECT name FROM employees WHERE department = 'Marketing';
```

You can have SELECT, FROM and WHERE clauses in a query, and you can think in tables, rows and columns again. But to match an attribute in a WHERE clause, you need an index on that column (compare Codd's issue no. 2 above). And, unlike in SQL, you cannot have subqueries. Again the question about the expressiveness of CQL remains unanswered, at least for me.

## Attempting a conclusion

I do not want, at any point and in any way, to offend any of the NoSQL datastore vendors or communities. The datastores all suit their purpose. As one can equivalently model a domain within each of the four pillars of NoSQL (at different gains and costs, obviously), one should carefully choose the database that suits the desired purpose. Data is the new oil [12]. And care should be taken when it comes to the further evolution of a datastore. It would be a pity if a datastore sacrificed good properties to achieve a *general purposefulness*, running the risk of repeating the history of relational databases in several ways. Fast, highly optimized and highly specialized data stores open the doors to highly sophisticated **polyglot** solutions, and here the relational databases are included as part of the database landscape. But there are lessons learned from the pre-relational days, and some ideas that led to relational database systems were not so bad at all, but **revolutionary** in the circumstances they were invented in. Looking back and learning seems to be the key to **self-healing data** and CRDTs as described by Shapiro et al. [13], using long-known ideas from Lamport's work [14] and mathematical lattice theory (to formally describe automatic convergence towards a common supremum for differing data states). Maybe the evolution of datastores in the NoSQL world can be sped up by increasing a datastore's *fitness* by looking back and learning, by understanding why certain decisions were made in earlier times, by understanding their consequences and hence avoiding mistakes. Temporary *faulty states* (like direct index dependencies or access path dependencies) are not always avoidable, but knowing about them is important and necessary. And often differing concepts are used on purpose. Pointing out specializations and weaknesses leads to more **honest solutions** (as nicely pointed out in a FoundationDB blog post [15]), and that would drastically simplify the choice of the correct tool from the more than 150 existing NoSQL datastores [16]. In conclusion: I am really not sure if any uttered criticism is even fair. Going back to access, indexing and order dependencies can be a good choice in recent developments, and maybe NoSQL does not have to *evolve out of this* again, for the sake of query performance. But is the separation of data and its representation on the storage not a good idea in the distributed world NoSQL stores reside in today? What makes the situation different from the *pre-relational time*? Supposedly, we will experience, at least to some extent, a *reinvention of the wheel* at certain points, and doing this **knowingly** or **unknowingly** could be the question to ask! Honest database solutions are needed. If you know your **drawbacks**, do not hide them. And point out **specializations** and motivate them. I really cannot predict future developments in NoSQL. But maybe someone wants to share their experience here? Every comment and discussion is very welcome!

## References

- [1] E. F. Codd: A Relational Model of Data for Large Shared Data Banks. Communications of the ACM, Vol. 13:6, 1970.
- [2] N. Lynch and S. Gilbert: Brewer's conjecture and the feasibility of consistent, available, partition-tolerant web services. ACM SIGACT News, Volume 33, Issue 2 (2002), pp. 51-59.
- [3] M. Stonebraker and U. Çetintemel: "One Size Fits All": An Idea Whose Time Has Come and Gone. Proceedings of the 21st International Conference on Data Engineering, 2005.
- [4] E. Brewer: Towards Robust Distributed Systems. Keynote at PODC, 2000.
- [5] HP white paper: There is no free lunch with distributed data – Consistency, availability, and partition-tolerance trade-offs on distributed data access systems.
- [6] E. Brewer: CAP Twelve Years Later: How the "Rules" Have Changed. Computer, IEEE Computer Society, 2012.
- [7] [Nathan Hurst's blog](http://blog.nahurst.com/visual-guide-to-nosql-systems)
- [8] [Neotechnology](http://www.neotechnology.com/)
- [9] [FoundationDB](https://www.foundationdb.org/)
- [10] M. A. Rodriguez and P. Neubauer: A Path Algebra for Multi-Relational Graphs. CoRR, 2010.
- [11] [Apache Cassandra](http://cassandra.apache.org/)
- [12] E. Redmond and J. R. Wilson: Seven Databases in Seven Weeks: A Guide to Modern Databases and the NoSQL Movement. Pragmatic Bookshelf, 2012.
- [13] M. Shapiro et al.: A comprehensive study of Convergent and Commutative Replicated Data Types. INRIA, RR-7506, 2011.
- [14] L. Lamport: Time, clocks, and the ordering of events in a distributed system. Communications of the ACM 21, 7, pp. 558-565, 1978.
- [15] [FoundationDB blog](https://web.archive.org/web/20150526180306/http://blog.foundationdb.com:80/on-lowered-expectations-transactions-scaling-and-honesty)
- [16] [nosql-database.org](http://nosql-database.org/)
# Sass support for web applications with jetty and wro4j

Suppose we voted for [Sass](http://sass-lang.com/) as the CSS preprocessor of our choice for a web application. Knowing that CSS must be generated from our Sass code every time an scss file is modified, we want to set up the project in a way that enables fast turnaround cycles during development.

The requirements are:

- generated CSS should be bundled within the WAR when building the webapp for *production* on the continuous integration server
- changes made to Sass resources during development time should be available to the browser without requiring a rebuild of the webapp

One way to fit these requirements is to use [Jetty](http://www.eclipse.org/jetty/) and the [Web Resource Optimizer for Java (wro4j)](https://code.google.com/p/wro4j/). wro4j provides Sass support with the JRuby based [RubySassCssProcessor](https://code.google.com/p/wro4j/wiki/RubySassCss), which can be used both as a pre and as a post processor. In order to compile our scss into CSS when building the webapp we have to include the [wro4j-maven-plugin](https://code.google.com/p/wro4j/wiki/MavenPlugin) into the project's POM:

```xml
<plugin>
    <groupId>ro.isdc.wro4j</groupId>
    <artifactId>wro4j-maven-plugin</artifactId>
    <version>${wro4jversion}</version>
    <configuration>
        <wroManagerFactory>ro.isdc.wro.maven.plugin.manager.factory.ConfigurableWroManagerFactory</wroManagerFactory>
        <cssDestinationFolder>${project.build.directory}/${project.build.finalName}/css/</cssDestinationFolder>
    </configuration>
    <executions>
        <execution>
            <phase>prepare-package</phase>
            <goals>
                <goal>run</goal>
            </goals>
        </execution>
    </executions>
</plugin>
```

The plugin's *run* goal is bound to the *prepare-package* phase, so the WAR will contain the CSS resources in /css/ after packaging the web application. In addition to the plugin configuration we also have to provide some merging and pre/post processor related configuration settings in **wro.xml** and **wro.properties** respectively.

**wro.xml**

```xml
<groups xmlns="http://www.isdc.ro/wro">
    <group name="base">
        <css>/sass/base/*.scss</css>
    </group>
</groups>
```

**wro.properties**

```properties
managerFactoryClassName=ro.isdc.wro.manager.factory.ConfigurableWroManagerFactory
preProcessors=cssUrlRewriting
postProcessors=rubySassCss,cssMinJawr
disableCache=true
```

The configuration settings for *caching* and the *manager factory* are necessary for the development environment: at development time we want a servlet filter to trigger the CSS compilation process every time the browser requests the CSS.

To enable this mechanism we place the file **override-web.xml** in the directory src/test/resources/.

**override-web.xml**

```xml
<web-app version="3.0" xmlns="http://java.sun.com/xml/ns/javaee"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd">
    <filter>
        <filter-name>wro</filter-name>
        <filter-class>ro.isdc.wro.http.WroFilter</filter-class>
    </filter>
    <filter-mapping>
        <filter-name>wro</filter-name>
        <url-pattern>*.css</url-pattern>
    </filter-mapping>
</web-app>
```

override-web.xml is a web.xml that Jetty applies to a web application after the application's own WEB-INF/web.xml, which means that it can override values or add new elements. The [jetty-maven-plugin](http://www.eclipse.org/jetty/documentation/current/jetty-maven-plugin.html) allows configuring the location of that file with the **overrideDescriptor** tag:

**pom.xml**

```xml
<plugin>
    <groupId>org.eclipse.jetty</groupId>
    <artifactId>jetty-maven-plugin</artifactId>
    <version>9.2.0.RC0</version>
    <configuration>
        <webApp>
            <overrideDescriptor>${project.basedir}/src/test/resources/override-web.xml</overrideDescriptor>
        </webApp>
    </configuration>
    <dependencies>
        <dependency>
            <groupId>ro.isdc.wro4j</groupId>
            <artifactId>wro4j-core</artifactId>
            <version>${wro4jversion}</version>
        </dependency>
        <dependency>
            <groupId>ro.isdc.wro4j</groupId>
            <artifactId>wro4j-extensions</artifactId>
            <version>${wro4jversion}</version>
        </dependency>
    </dependencies>
</plugin>
```

Finally we configure the build to exclude the Sass resources and the wro4j configuration from the WAR:

**pom.xml**

```xml
<plugin>
    <artifactId>maven-war-plugin</artifactId>
    <configuration>
        <warSourceExcludes>
            WEB-INF/wro.*,
            sass/**
        </warSourceExcludes>
    </configuration>
</plugin>
```

That's it. With this setup developers are now able to modify scss files and see the resulting CSS just by reloading the page in the browser.

**webapp project structure**

![projectstructure](https://media.synyx.de/uploads//2014/05/projectstructure.png)
# NoSQL still matters

From 28 to 30 April the *NoSQL matters* conference took place in Cologne. The venue was the KOMED in the MediaPark, only about a fifteen-minute walk from Cologne's main station and the cathedral. In addition to two days of talks there was also a training day. The workshops on offer were of high quality, and some companies such as Neotechnology took the chance to run a meetup alongside the conference. The talks covered a broad range of topics, and thanks to the still manageable number of attendees there was plenty of opportunity for discussion. I can recommend attending these events without reservation to anyone who is interested in new database technologies and big data and wants to gain insight from both an application-oriented and a theoretically well-founded perspective. The subject of this short blog post, however, is a few points on which opinions still differ and which repeatedly led to discussions at the conference in Cologne.

**What belongs to NoSQL?**

**NoSQL** is still a young discipline of computer science and therefore subject to constant change and to an ongoing search for definitions. As so often, one encounters well-known concepts in new clothes here. There is still no uniform opinion on what exactly the term *NoSQL* means. By now there is agreement on the classification of the datastores into four categories: key/value stores, document stores, wide column stores and graph databases. Beyond that, many buzzwords and their relation to NoSQL are discussed controversially and are interpreted differently depending on the application context: CAP, BASE, ACID, data redundancy, horizontal and vertical scaling, normalization and denormalization, sharding and replication, and many more.

Discussions about the following three points caught my attention repeatedly in Cologne, and they did not always seem to be well and consistently understood.

**The naming of relational database systems**

Relational database systems are called relational because they have their origin in mathematical relations. Relations here do not mean the relationships between the tuples of database tables; they denote sets of tuples. Formally, a relation is nothing more than a named subset of the Cartesian product of non-empty sets, and its elements are tuples. The **relational algebra**, or equivalent calculi such as the **TRC** or **DRC**, form the foundation of relational query languages (and thus, ultimately, of **SQL**). This notion of a relation has, at first, nothing to do with referential integrity, i.e. the *inner consistency* of the tuples stored in a relational database.

**Consistency: the C in ACID and CAP**

The letter **C** in the acronyms **ACID** and **CAP** stands for consistency in both cases, but refers to a different notion of consistency in each. Consistency in ACID means that only transitions from valid states to valid states are allowed, in the sense of the referential integrity defined for the database, i.e. an 'inner consistency' of the tuples stored in a database. Consistency in the sense of CAP means that every node in the distributed system delivers the 'right' answer to every request at every point in time. What consistency requires therefore depends on the service. Perhaps the most natural notion of consistency is linearizable consistency: every client that issues requests against the distributed system gets the impression that all request/response operations are answered by a single, central server. (Formally this means that a total order of these operations must exist, such that every operation appears to be executed immediately and exclusively.)

**Maybe-consistent systems?**

In the NoSQL world one frequently encounters the term **eventual consistency** (among other places as part of the acronym BASE: **B**asically **A**vailable, **S**oft state, **E**ventual consistency). The English word *eventual* does not translate to the German *eventuell* ('possibly'), but rather to *schlussendlich* ('in the end')! Eventual consistency is a consistency model for distributed systems: a distributed system satisfies this model if, for a stored data item and provided that no further updates are made to it, it at some point returns the same, last updated value for all requests for this item. Eventual consistency does not mean that the system 'might perhaps at some point' reach a consistent state.

**Discussion**

With the growing number of available datastores and their specialization it becomes increasingly important to know exactly for which application area and use case a datastore is to be chosen. This determines which consistency model should be applied, which data model suggests itself, and how sharding and replication strategies are to be chosen, especially when *polyglot* solutions are to be used. The partly still differing interpretations of the definitions and the resulting misunderstandings keep the discussions in the NoSQL field exciting, particularly since comprehensive long-term experience cannot yet exist given the age of these technologies. Comments and opinions on this are explicitly welcome!

**Selected literature**

There is a multitude of articles and books worth reading on the topics discussed above. A useful (if subjective) selection follows below. The thoughts and remarks expressed in this blog post also partly follow these books and articles.

- Seth Gilbert and Nancy Lynch. Brewer's conjecture and the feasibility of consistent, available, partition-tolerant web services. SIGACT News, vol. 33, no. 2, pp. 51-59, 2002
- Stefan Edlich, Achim Friedland, Jens Hampe, Benjamin Brauer and Markus Brückner. NoSQL: Einstieg in die Welt nichtrelationaler Web 2.0 Datenbanken. Hanser Verlag, 2011
- Eric Brewer. CAP Twelve Years Later: How the "Rules" Have Changed. Computer, vol. 45, no. 2, pp. 23-29, 2012
- Seth Gilbert and Nancy A. Lynch. Perspectives on the CAP Theorem. Computer, vol. 45, no. 2, pp. 30-36, 2012
- Pramod J. Sadalage and Martin Fowler. NoSQL Distilled: A Brief Guide to the Emerging World of Polyglot Persistence. Addison-Wesley Professional, 2012
- Eric Redmond and Jim R. Wilson. Seven Databases in Seven Weeks. Pragmatic Bookshelf, 2012
- Katarina Grolinger, Wilson A. Higashino, Abhinav Tiwari, and Miriam A. M. Capretz. Data management in cloud environments: NoSQL and NewSQL data stores. Journal of Cloud Computing: Advances, Systems and Applications, vol. 2, no. 22, 2013
# synyx @ Berlin Expert Days 2014

Last weekend our conference train got rolling again. A group of twelve synyx guys and gals boarded the ICE to our
capital, heading for the [Berlin Expert Days](http://bed-con.org/), a nice and small two-day developer conference. The
anticipation was high as the topics and speakers were promising and we were looking forward to having a nice time inside
and outside of the conference.

## Talks

The following talks were my (highly subjective) Top 3 of the conference:

### 1. Eberhard Wolff – Death to Java App Servers!

[Eberhard Wolff](https://twitter.com/ewolff) started his talk with the provoking thesis that
[Java app servers are dead](http://www.slideshare.net/ewolff/java-application-servers-are-dead) and that there is no
point in using them any longer. At first I thought this was strongly exaggerated and probably not true, but then he
backed his statement with a surprisingly high number of really good arguments. He made me hate application servers :-(.

His key points among others were:

- In reality there is rarely such a thing as multiple applications running on one app server. The reasons are numerous isolation issues.
- The application server is just another part of the application itself, individually configured for only this application. The application depends on the app server and vice versa.
- App servers lead to an unnecessarily complex infrastructure with high turnaround and deployment costs, making continuous integration and delivery really hard.

In the end Wolff did not yet provide an elaborated alternative but hinted that an architecture of standalone micro
service applications could be a much better way to go.

![BedConEWolff](https://media.synyx.de/uploads//2014/04/BedCon.jpeg)

### 2. Jochen Mader – VERT.X for World Domination

[Jochen Mader’s](https://twitter.com/codepitbull) talk about the distributed JVM platform [VERT.X](http://vertx.io/) was
highly informative and entertaining. While talking with incredible speed and in a stylish outfit (wearing a Star Trek
t-shirt **and** socks) he presented his
[plan to conquer the world with robots](http://www.slideshare.net/codepitbull/vertx-for-worlddomination). He
demonstrated the prototype of his version of Skynet, consisting of a RasPi cluster and a Lego Mindstorms robot running
VERT.X instances.

This effectively indicated the power of VERT.X and its many features, as you can connect services of many different JVM
languages without much effort in a VERT.X cluster, which very intelligently takes care of all needed communication and
resilience issues by itself. Mader pointed out the modularity, event-driven architecture, scalability and multi-language
capability of VERT.X clusters, convincing his audience of the power of this solution.

![worlddomination](https://media.synyx.de/uploads//2014/04/worlddomination.jpg)

### 3. Michael Plöd – Caching in Business Applications

While the first two of this top three list were a clear pick for me, there were many equally good talks battling for the
third rank.
In the end I picked [Michael Plöd’s](https://twitter.com/bitboss) talk, who
[shed light on the many aspects of caching](http://de.slideshare.net/mploed/caching-fur-business-anwendungen-deutsch).
After explaining the demand and applications of caching in general, he enumerated a nice list of best practices and
patterns everyone should follow when designing caches:

- Identify appropriate layers for caching
- Stay local as long as possible
- Prefer invalidation over replication
- Avoid large heap sizes only for caching
- Consider a distributed cache for very large amounts of data
- The Op-Guy is your best friend!
- Only cache data that is suited for caching (read-mostly, expensive to get)
- Use proved and tested cache implementations and NEVER EVER write your own implementation
- Introduce caches in three steps (optimize the application itself → introduce local cache → introduce distributed cache)
- Optimize serialization of objects
- Abstract your cache provider (see the sketch after this list)
- Store often used data as near to your application as possible
- Use off-heap storage for cache instances of 4GB or more

![caching](https://media.synyx.de/uploads//2014/04/caching.jpg)

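The point about abstracting the cache provider can be illustrated with a thin interface between application code and the
concrete cache. The following is only a sketch with invented names, not anything Plöd showed; the local implementation
is merely a stand-in, and in production you would delegate to a proven cache library behind the same interface, just as
the list demands.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Thin abstraction so application code never talks to a cache library directly.
// Names are illustrative only.
interface Cache<K, V> {
    V getOrLoad(K key, Function<K, V> loader);
    void invalidate(K key);
}

// Local, in-memory stand-in; in production this would delegate to a proven cache
// implementation and could later be swapped for a distributed provider without
// touching client code (the "three steps" from the list above).
class InMemoryCache<K, V> implements Cache<K, V> {

    private final Map<K, V> entries = new ConcurrentHashMap<K, V>();

    @Override
    public V getOrLoad(K key, Function<K, V> loader) {
        return entries.computeIfAbsent(key, loader);
    }

    @Override
    public void invalidate(K key) {
        entries.remove(key);
    }
}
```
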
### 4.-n. Many, many more

In addition to the top three there was a high density of competent speakers with interesting and informative talks. I
want to point out our homeboy [Florian Hopf](https://twitter.com/fhopf) who gave a nice
[overview on the state of search driven applications](https://speakerdeck.com/exensio/search-driven-applications)
together with his colleague. They demonstrated that it is more worthwhile than ever to take a look at Solr and
ElasticSearch to implement your queries. Also definitely worth mentioning are
[Stefan Tilkov](https://twitter.com/stilkov) and [Martin Eigenbrodt](https://twitter.com/Eigenbrodtm) with their
demonstration and evaluation of
[REST HTTP implementations in different JVM languages](https://speakerdeck.com/stilkov/restful-http-on-the-jvm),
[Stefan Zörner](https://twitter.com/StefanZoerner) with a useful categorization of
[software architecture and different ways to evaluate your architecture](http://www.embarc.de/vortrag-berlin-expert-days-2014-verunfallte-softwarearchitektur/),
and [Timmo Freudl-Gierke](https://twitter.com/timmo_gierke) with an insightful
[practical overview on micro services](https://speakerdeck.com/timmo/micro-services-die-verheissungen-konnten-eintreten).

## The Trend

The clear winner in the category “trending buzzword” was ***micro service***. It ran like a common thread through the
whole conference. Not only Freudl-Gierke’s talk about micro services in practice took care of the topic. Over the
conference we learned that it is time to inaugurate an era of standalone micro service applications of any language that
talk together over REST interfaces, get clustered with VERT.X, collect their logs centrally using logstash and kibana
and intelligently use their local and distributed caches, thus saving failed architectures. We saw how to create them
easily using Spring Boot and how to supply continuous integration for all of them using Docker.

These are the highly anticipated expectations – it lies in the nature of hyped buzzwords that fulfilling these
expectations is a completely different matter. Let’s see what the future has in store for us!

## Time Off

So much for the technical mumbo jumbo. Let’s take some time off and talk about one of the greatest benefits of the
BEDCon – that it takes place in Berlin! There are not many cities where you can enjoy your evenings in the same way as
in our capital. During our three days in Berlin we got yelled at by an Italian waiter for not ordering another Grappa,
consumed numerous instances of “[Diesel](http://en.wikipedia.org/wiki/Shandy#Diesel)”, visited a punk rock concert, an
underground live music club and a Russian disco, and on the walk between the different locations we enjoyed our
on-the-way-beers that we got from a “Späti”.

A tech-savvy taxi driver with a dash cam showed us a video of a police car causing an accident on his Galaxy Note
10.1 (while speeding through the city way above the speed limit) and a US-American waitress in a Cuban restaurant
took turns laughing and singing while serving delicious meals and cocktails.

It is difficult to collect so many different experiences in such a short time anywhere else and we enjoyed every night
until as late as reasonably possible.
These great experiences in combination with the high quality of conference talks will ensure that next year a large
number of synyx dudes and dudettes will again board the ICE to Berlin.

# Code gluse

Today’s post targets an API which was released on Dec. 11, 2006: the `javax.script` package [1]. A lot of good articles
have been written around it.

The intention for this post is not about ‘how to use the scripting package’, but about gluse. So what do I mean with the
phrase gluse? Gluse is a coinage for glue and (re)usage. As many Java developers know about the plenty of good libraries
on Maven Central / GitHub and how to integrate them, a few of them might ask how to integrate libraries from other
languages as well. As many of the everyday problems have already been addressed, there is a good chance that someone
else has done the job for you and is willing to share.

Sometimes it’s written in pure Java, sometimes in a different language. Let’s see how to integrate the latter kind of
libraries. (StackOverflow lists a dozen [2], btw.)

The next parts will give you some information in the form of three examples. The first and second example will address
Javascript, as Javascript is getting more and more into the focus of developers and Oracle will ship their new engine
‘Nashorn’ with the upcoming Java 8 release, while the third example will target a more complex scenario using JRuby.
**All examples** can be downloaded from [3]. So it’s up to you if you want to read the sources in parallel, afterwards,
or by playing with the code in your IDE instantly.

All code examples have been written to be compatible with Java 1.6. See the note in example two when it comes to the
`bind` function.

## Proxy

Let’s discuss the first example: we want to replace parts of a string using a regular expression, but hook into the
process of manipulating the matching elements before the replacement eventually takes place – by adding some new
content or returning something completely different.

In Java you would probably end up using `java.util.regex.Pattern`, creating a matcher, iterating over the matched
groups and so on. There’s nothing wrong about it, but Javascript already defines that kind of behaviour [4]:

```javascript
"first 000 second".replace(/[a-zA-Z]+/g, function (match) {
    return "[" + match.toUpperCase() + "]";
});
```

As both Rhino and Nashorn support the `javax.script.Invocable` type, we will create an interface to address the
problem – you’ll find the whole documentation in the mentioned project [3], but for the sake of completeness:

Apply the ‘pattern’ on the ‘sequence’ and call the ‘callback’ on each matched element.
Either on ‘all’ matching elements, or on ‘any’ (first makes sense here).

```java
public interface Replacement {

    CharSequence any (Pattern pattern, CharSequence sequence, Function<CharSequence, CharSequence> callback);

    CharSequence all (Pattern pattern, CharSequence sequence, Function<CharSequence, CharSequence> callback);
}
```

The final Java code would look like the following (Java 8 users will favour the new lambda syntax:
`(match) -> { return "[" + match + "]"; }`):

```java
Replacement replacement;
replacement = replacement ();
CharSequence enclosed = replacement.all (Pattern.compile ("\\d+"),
        "could you please enclose 1234, 789, 345 with brackets?",
        new Function<CharSequence, CharSequence> () {
            @Override
            public CharSequence apply (CharSequence sequence) {
                return "[" + sequence + "]";
            }
        });
/* replacement () returns a proxy of the type Replacement, using the shipped js scripting engine. The evaluated script
   returns an instance, which can be encapsulated using the Invocable#getInterface signature. */
```

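How such a `replacement ()` factory can hand out the proxy is sketched below. The `Invocable#getInterface` call is the
standard `javax.script` mechanism mentioned above; the engine lookup name and the classpath location of the script are
assumptions of this sketch, not necessarily those of the original project.

```java
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.InputStreamReader;
import java.io.Reader;

class Replacements {

    // Evaluates the JavaScript source (the self-executing function that returns a
    // Replacement instance) and wraps the returned object in the Java interface.
    static Replacement replacement () throws ScriptException {
        ScriptEngine engine = new ScriptEngineManager ().getEngineByName ("JavaScript"); // Rhino on Java 6/7, Nashorn on Java 8
        Reader script = new InputStreamReader (
                Replacements.class.getResourceAsStream ("/replacement.js")); // path is an assumption
        Object instance = engine.eval (script);
        return ((Invocable) engine).getInterface (instance, Replacement.class);
    }
}
```
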
The Javascript implementation would look like:

```javascript
(function () {
    function replace (regex, content, callback) {
        ...
    }

    var Replacement = function () {};

    Replacement.prototype.any = function (regex, content, callback) {
        return replace (new RegExp (regex), content, callback);
    };

    Replacement.prototype.all = function (regex, content, callback) {
        return replace (new RegExp (regex, 'g'), content, callback);
    };

    return new Replacement ();
}) ();
```

The Java code for this example would probably be shorter – measured in LOC – but the basic steps needed for an
integration can be shown pretty well, and both worlds might benefit from your work 🙂

One nice feature about this kind of mechanism is that you can quickly prototype your functionality while still being
able to change parts of the implementation using pure Java afterwards.

## Modularity

Let’s come to the second example. You may have written a bunch of Javascript files in a modular way or just don’t want
to put everything in a single file. While the first example showed how to proxy your implementation, the second example
will show you a basic approach for dynamically loading further resource and/or code files. The following signature
should be provided and accessible from all scripts:

```javascript
require("org.geonames.reverse");
```

The similarity to requirejs [5] is intentional and you may want to extend the signature to be fully compliant, but this
will be left for your curiosity 🙂

Loading resources from ‘unknown’, or from ‘at runtime unknown’, sources is by nature critical, as the code is executed
in the same JVM which hosts your application as well. Therefore, you should only load resources you really trust.

You could achieve this by verifying the signature of the reviewed files using a PKI [6] infrastructure – `javax.crypto`
is your friend here and fortunately you can implement this in Java and/or use a security library to accomplish this
task.

Simply spoken: always check the integrity if you provide a way for modifications.

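What such an integrity check could look like with on-board JDK means is sketched below. This is only a rough sketch
under assumptions of mine: signature verification actually lives in `java.security` (the `Signature` class) rather than
in `javax.crypto`, the algorithm is just an example, and how you obtain the trusted public key and the detached
signature bytes is left to your infrastructure.

```java
import java.security.PublicKey;
import java.security.Signature;

// Verifies that a script was signed by a trusted key before it gets evaluated.
// Sketch only: key management, algorithm choice and error handling are left out.
class ScriptVerifier {

    private final PublicKey trustedKey;

    ScriptVerifier (PublicKey trustedKey) {
        this.trustedKey = trustedKey;
    }

    boolean isTrusted (byte[] scriptBytes, byte[] signatureBytes) throws Exception {
        Signature signature = Signature.getInstance ("SHA256withRSA"); // example algorithm
        signature.initVerify (trustedKey);
        signature.update (scriptBytes);
        return signature.verify (signatureBytes);
    }
}
```

A `trusting (resource)` helper like the one used in the glue code further below could delegate to such a verifier and
throw an exception for anything that does not pass the check.
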
If you are already familiar with the scripting engine API, you might have noticed that require is a function and not an
‘object’.

Even though functions in Javascript are objects, there is no semantic way to say *“this object is a function and can be
invoked”* if you share it between the environments.

There might be support for this in some engines, but not in others, and the `javax.script` API is designed for general
purpose – depending on engine-internal interfaces is not desired.

```javascript
obj.require("org.geonames.reverse"); /* nah, okay, but requires additional knowledge about obj. */
```

Fortunately there is a solution. You can attach a script context to the evaluation process and reuse the context later
on, but you shouldn’t use the internal context as it could leak if your engine leaks.

Pseudo algorithm:

1. create a Java function object which can load your resources from the *classpath*, the *internet*, the *local filesystem*, … with a method signature you know (a function/SAM object) like: `void apply (String)`
2. create a script context and attach the object from 1. to it with a variable called ‘whatever’ (really whatever name you like)
3. evaluate an inline require function before you evaluate your business code, which puts your require function into the scope of the context from 2.
4. evaluate your business code, which relies on the require function, with the same scope from 2.
5. have fun

A naive version of that inline require function would be:

```javascript
var require = function (library) { whatever.apply (library); }
```

The above code would be sufficient, but it has some drawbacks, as it only works if the ‘whatever’ object is in the
correct execution scope and if it provides the correct signature – someone could overwrite the binding or do something
you simply don’t want him/her to do.
We need some slight improvements to correct this.

```javascript
var require = function (library) {
    this.apply(library);
}.bind(whatever);
delete whatever;
```

> “The bind() method creates a new function that, when called, has its this keyword set to the provided value, with a
> given sequence of arguments preceding any provided when the new function is called.” [7]

If the bind function is not available in your Rhino environment, you may want to look at the implementation from
Prototype [8] et al. and add it manually with the same procedure.

We delete the ‘whatever’ object from the script context afterwards.

You need the following Java code as glue:

```java
// internal context for variables
final Bindings bindings = scripting.newBindings ();
bindings.put (Importer, new Function<String, Void> () {

    // load a library 'argument' from the 'lib' directory
    @Override
    public Void apply (String argument) {
        // trusting returns either a valid stream object or throws an 'untrusted code' exception
        String resource;
        resource = String.format ("/lib/%s.js", argument);
        scripting.evaluate (trusting (resource), context);
        return null;
    }
});
context.setBindings (bindings, ScriptContext.ENGINE_SCOPE);
// add the require function to the scope before the application script is loaded
scripting.evaluate (requirejs (Importer), context);
// execute the script ultimately
scripting.evaluate (applicationjs (), context);
```

## Reporting

In the last example I want to explain how to integrate a script engine which is not shipped by default – JRuby [9]. The
idea behind it is to embed code from Ruby gems into your application, especially PDFKit [10] for this example. PDFKit
describes itself with:

> “Create PDFs using plain old HTML+CSS. Uses wkhtmltopdf on the back-end which renders HTML using Webkit.”

Mostly you don’t want to handle HTML content directly, as your data is often stored in form of a ‘model’. Our solution
should therefore target the transformation `Model -> HTML -> PDF`, which can be achieved using e.g. the nice Jade [11]
language for the rendering process, especially Jade4j [12].

Instead of writing the integration code for wkhtmltopdf ourselves, we will build on the work of PDFKit and write some
JRuby glue.

If you need some information about ‘gem bundling’ I would recommend the articles/examples from Sieger [13] and Harada
[14] as a starting point.

You will find a local file-based repository in the project, as I wanted Maven [15] to handle all the dependencies, but
any other repository might work fine. It simply depends on your infrastructure and what suits you best.

Pseudo algorithm:

1. put jruby-complete on your classpath, as the library ships the JSR-223 implementation
2. put the converted pdfkit bundle on your classpath
3. put any other needed library on your classpath (`jade4j, guava, …`)
4. write some JRuby code to instantiate a configured PDFKit object
5. proxy the returned JRuby object from 4. with a Java interface
6. convert a Jade (or otherwise) generated HTML stream to PDF using the proxy from 5.

I’ll show you the glue for the proxy only. Please download the project under [3] if you want to see the remaining
parts.

```java
public interface Pdfy {

    boolean convert (InputStream streamin, OutputStream streamout);

    boolean convert (InputStream streamin, OutputStream streamout, Map<String, String> options);
}
```

```ruby
class Pdfy
  def initialize(stylesheet)
    @stylesheet = stylesheet
  end

  def convert(streamin, streamout, options = {})
    begin
      html = streamin.to_io.read
      kit = PDFKit.new(html, options)
      if @stylesheet
        kit.stylesheets << @stylesheet
      end
      out = streamout.to_io
      out.binmode << kit.to_pdf
      out.flush
    rescue
      return false
    end
    true
  end
end
```

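Wiring the two together works just like in the first example, only with the JRuby engine from jruby-complete. The
following is a hedged sketch: the engine lookup name, the script location and the assumption that the Ruby script ends
with an expression like `Pdfy.new(stylesheet)` (so that the evaluation returns an instance) are mine, not part of the
original project.

```java
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import java.io.InputStreamReader;
import java.io.Reader;

class Pdfys {

    // Evaluates the Ruby script and wraps the returned Ruby object in the Pdfy
    // interface, again via Invocable#getInterface.
    static Pdfy pdfy () throws ScriptException {
        ScriptEngine engine = new ScriptEngineManager ().getEngineByName ("jruby"); // name as registered by jruby-complete
        Reader script = new InputStreamReader (
                Pdfys.class.getResourceAsStream ("/pdfy.rb")); // path is an assumption
        Object instance = engine.eval (script);
        return ((Invocable) engine).getInterface (instance, Pdfy.class);
    }
}
```
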
Maven will produce an assembly as a zip file, which can be extracted elsewhere, with a shell script for Windows and
*nix based systems.

You need to provide the fully qualified path of wkhtmltopdf as the first argument and the fully qualified path of the
output file (with file extension) as the second argument.

I did not implement any special CLI handling for this prototype.

You need to install wkhtmltopdf [16] as a consequence. I installed wkhtmltopdf 0.11.0 rc2 on Windows 7 x64 and
wkhtmltopdf 0.9.9 on Ubuntu 13.10 x64 (virtualized).

Even if writing some boilerplate is not that interesting, writing less boilerplate is! So instead of writing your own
wheel, you might want to spend your energy on making another wheel feel more rounded.

Whether a script engine can be used in your production environment depends on your configuration, of course, but
writing some glue to reuse other solutions might be worth thinking about.

The effort could be less in comparison to a full rewrite. Stick to a separation of interfaces and implementations and
let the environment decide.

The devs from Rhino/Nashorn/JRuby did quite a good job, as did the devs of the mentioned libraries. You should compile
the project with Java 1.6(!), 1.7 and 1.8 and look at the results.

- [1] http://en.wikipedia.org/wiki/Scripting_for_the_Java_Platform
- [2] http://stackoverflow.com/questions/11838369/where-can-i-find-a-list-of-available-jsr-223-scripting-languages
- [3] https://media.synyx.de/uploads//2014/01/synyx.sample.zip
- [4] https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace
- [5] http://requirejs.org/
- [6] http://de.wikipedia.org/wiki/Public-Key-Infrastruktur
- [7] https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/bind
- [8] http://prototypejs.org/doc/latest/language/Function/prototype/bind/
- [9] http://www.jruby.org/
- [10] https://github.com/pdfkit/pdfkit
- [11] http://jade-lang.com/
- [12] https://github.com/neuland/jade4j
- [13] http://blog.nicksieger.com/articles/2009/01/10/jruby-1-1-6-gems-in-a-jar/
- [14] http://yokolet.blogspot.de/2010/10/gems-in-jar-with-redbridge.html
- [15] http://maven.apache.org/
- [16] https://code.google.com/p/wkhtmltopdf/

# Talking tech to the business guy

Every development project has a business guy attached, who holds the project money and makes the decisions what the
team should implement. That guy can be your customer, sales manager, product manager, the product owner in a scrum
project or simply your boss. In this article we will conveniently call him “manager”. Constant small refactoring, test
coverage and other technical things that you do while developing features don’t really concern him. But from time to
time you have a big, technical issue that does not have apparent business value and does not add any features. You see
it as absolutely necessary but you need the time and approval from your manager to do it. Watch this conversation
between a developer and the well known “pointy haired boss”, which I stole from a
[stackexchange.com post](http://programmers.stackexchange.com/questions/157928/how-to-justify-code-refactoring-time)
and which seems awkwardly familiar to every developer:

> *Developer*: We need time to do this technical redesign.
>
> *PHB*: Why?
>
> *Developer*: It’ll make things easier to fix
>
> *PHB*: So what?
>
> *Developer*: It’ll increase throughput – we’ll get new builds out the door quicker?
>
> *PHB*: So what?
>
> *Developer*: Err… Happier customers?
>
> *PHB*: WTF?
>
> *Developer*: I mean increased recommendations, greater satisfaction, more profit sooner due to low turnaround.
>
> *PHB*: Oh! Sounds nice. But it only “sounds” nice, can I see it?
>
> *Developer*: WTF?
>
> *PHB*: Show me some numbers!
>
> … and so on …

The problem is that the manager often cannot see the benefits of technical necessities like refactorings, redesign or
establishing test coverage. How could he? He probably never learned their importance and the consequences when you omit
them. The guy who knows better and whose job it is to convince the manager is you! Let’s talk about some aspects that
will help you do this.

## 1. Trust

Some of you will be surprised, but in my opinion the most important factor in this matter is that your manager trusts
you! If he knows you well and has the feeling that you want the best for him and the project, then there is a chance
that the conversation takes this path:

> *Developer*: We need time to do this technical redesign.
>
> *PHB*: Well, it is not particularly cheap but you are the expert and your advice has always been valuable, let’s do
> it!

It doesn’t get any easier than that! The way to achieve this trust is to always be honest and straightforward and talk
openly. Don’t try to trick your manager into approving something by leaving out facts or by faking estimations.
Transparency is key in the relationship between manager and developer. If you earn his trust, a good, non-sociopathic
manager will reward you with more freedom. The less your manager trusts you, the more trouble you will have convincing
him of a technical necessity.

## 2. Technical affinity of the manager

Try to figure out the technical affinity and experience of your manager. This will decide what kind of arguments will
help your case. If he has a lot of technical experience, maybe even is a former developer himself, then chances are
high that you are able to convince him by technical reasoning. The conversation might go like this:

> *Developer*: Before we implement any new functionality we need to write tests for this legacy code first.
>
> *PHB*: Why is that? We never had any tests back in the nineties when developing software while listening to MC
> Hammer?
>
> *Developer*: Yeah, but remember that it was difficult to change anything without breaking another thing? Nowadays you
> write automated unit tests so you can always make sure that changes to the code don’t introduce unwanted side effects
> into the software.
>
> *PHB*: Oh yeah, that always bothered me. And unit tests sound like a good way to fix that problem. Let’s write some
> tests first!

Points like “only tested code is easy to change without side effects” or “refactoring and redesign are
[necessary in an agile process](http://xprogramming.com/blog/why-is-refactoring-a-must/) to adapt to the requirements”
or even “no one writes code perfectly the first time” are likely to convince a tech-savvy manager. But they won’t help
with a manager who doesn’t have any technical knowledge at all. If you are unlucky they confuse him even more and drive
him away from the decision you want him to make.

## 3. Find a leverage that the manager understands

If you sense that the manager won’t understand what you are talking about, don’t blame him. He probably has his
qualities elsewhere. Maybe you still can find a way to make your point. Most business guys think in buzzwords like
time, budget, quality.
Get to know your manager’s favourite buzzword and try to tell him how your proposition helps this particular priority.

> *Developer*: We need time to do this technical redesign.
>
> *Time-PHB*: Why?
>
> *Developer*: Because after that we can introduce new features into the software way faster!
>
> *Budget-PHB*: I don’t see the real benefit.
>
> *Developer*: Well, the code will be easier to handle so we will need less resources to make changes. Even new
> developers will be able to understand and expand the code without training.
>
> *Quality-PHB*: Meh, I still don’t see it!
>
> *Developer*: But the refactored code will also be more robust to side effects so we will be able to introduce changes
> without fear of bugs.
>
> *Time-PHB, Budget-PHB, Quality-PHB in unison*: THAT IS THE BEST THING EVER, WHY HAVEN’T WE ALREADY DONE THIS?

Also helpful are analogies to something that the manager knows. As an example, one of the most famous analogies is to
refer to undone technical necessities by the phrase “technical debt”. If you explain to the manager that, like real
debt, it is growing all the time and has to be paid back with interest, then maybe he will understand the consequences.

## 4. Business case

If the manager is too cautious and insists on numbers like in our first conversation with the PHB, then it might be
best to give them to him. Try to make an estimation on how much the development will be accelerated or on how much the
quality will improve and put it into numbers to present to him as a business case. You know quite well that the numbers
will be highly imprecise and probably won’t come true exactly as estimated, but the business case will help your
manager to see the benefits. You just have to clarify that the numbers are what they are – rough estimations. That way
the manager can make a more informed decision and no one can really blame you afterwards when the effects don’t meet
the estimations.

## 5. Play the expert card

In a typical developer-manager relationship you are not only the guy who codes but also a consultant. You are the
technical expert and are entitled to advise your manager. Don’t be afraid to do that! Often it helps to say something
along the lines of “as your technical expert I strongly recommend to…” because it reminds the manager that you are the
one who knows how to develop software. But keep in mind that you always have to back your recommendation with valid
points and reasons! Playing the expert card and then lacking the arguments to explain your reasons is just a cheap
bluff.

## 6. PDD – Pain driven development

You really exerted yourself and tried everything to make your point. But everything fails. The manager ignores your
advice and insists on doing it his way. At some point it is best not to stress the matter any further and just leave
it. You know it will cause pain, but sometimes pain is good.
You will have the opportunity to use it for your argumentation\nnext time.",[11259,24822,24823,24826],{},[18,24824,24825],{},"“If we had any test coverage, half of the reported bugs would not have occured”.",[18,24827,24828],{},"“We have to make estimations that high, because it is difficult to implement new features in that part of the software\nwithout the big redesign we didn’t do three months ago”.",[18,24830,24831],{},"Just try not to make your manager look bad for his decisions. Blame improves nothing and your further cooperation will\nbe much more comfortable if you maintain a good relationship.",[2207,24833,24835],{"id":24834},"_7-understand-the-other-side","7. Understand the other side",[18,24837,24838,24839,986],{},"You should never forget that the manager might have good reason to reject your proposition. He has to make the project\nprofitable and he gains nothing if you do technical changes just because you think they are cool. Always reflect on\nyourself and try to figure out if your proposition really helps the project or if it is just something that sounded cool\nin some blog post ",[573,24840,9578],{},[2207,24842,20392],{"id":20391},[18,24844,24845],{},"That’s it, I hope this list will help you communicating your desires to your manager.",[18,24847,24848,24849,24852,24853,24858,24859,24861,24862,24865],{},"One last important piece of advice: To a certain degree technical issues are something your manager doesn’t have to know\nabout at all and doesn’t even want to know about. ",[573,24850,24851],{},"You"," are the\nsoftware ",[585,24854,24857],{"href":24855,"rel":24856,"title":24857},"http://manifesto.softwarecraftsmanship.org/",[589],"craftsman"," and often ",[573,24860,13713],{}," know best what is the\nright choice for your project. Things like constant refactoring and maintaining test coverage are natural parts of the\nsoftware development process and you ",[573,24863,24864],{},"have"," to do them to deliver a good product, you don’t have to ask for permission\nto do them.",[18,24867,24868],{},"It’s the same as with every other craftsmanship. You also don’t ask your plumber how he installs your bathtub, you only\ncare that he installs it at all and you know it takes the time it takes. And if it doesn’t start to leak at any time you\nwill never question that he did a good job.",[18,24870,24871],{},"Some Links:",[18,24873,24874],{},[585,24875,24878],{"href":24876,"rel":24877},"http://stackoverflow.com/questions/3138823/what-is-the-best-way-to-explain-refactoring-to-non-technical-people",[589],"stackoverflow.com thread on explaining refactoring to non-tech guys",[18,24880,24881],{},[585,24882,24885],{"href":24883,"rel":24884},"http://www.infoq.com/news/2010/07/explaining-refactoring",[589],"explaining refactoring to management",{"title":48,"searchDepth":86,"depth":86,"links":24887},[24888,24889,24890,24891,24892,24893,24894,24895],{"id":24691,"depth":86,"text":24692},{"id":24712,"depth":86,"text":24713},{"id":24750,"depth":86,"text":24751},{"id":24802,"depth":86,"text":24803},{"id":24809,"depth":86,"text":24810},{"id":24816,"depth":86,"text":24817},{"id":24834,"depth":86,"text":24835},{"id":20391,"depth":86,"text":20392},[613],"2014-01-15T08:11:56","Every development project has a business guy attached, who holds the project money and makes the decisions what the team\\nshould implement. That guy can be your customer, sales manager, product manager, the product owner in a scrum project or\\nsimply your boss. In this article we will conveniently call him “manager”. 
Constant small refactoring, test coverage and\\nother technical things that you do while developing features don’t really concern him. But from time to time you have a\\nbig, technical issue, that does not have apparent business value and does not add any features. You see it as absolutely\\nnecessary but you need the time and approval from your manager to do it. Watch this conversation between a developer and\\nthe well known “pointy haired boss”, that I stole from\\na stackexchange.com post\\nand that seems awkwardly familiar to every developer:","https://synyx.de/blog/talking-tech-to-the-business-guy/",{},"/blog/talking-tech-to-the-business-guy",{"title":24605,"description":24903},"Every development project has a business guy attached, who holds the project money and makes the decisions what the team\nshould implement. That guy can be your customer, sales manager, product manager, the product owner in a scrum project or\nsimply your boss. In this article we will conveniently call him “manager”. Constant small refactoring, test coverage and\nother technical things that you do while developing features don’t really concern him. But from time to time you have a\nbig, technical issue, that does not have apparent business value and does not add any features. You see it as absolutely\nnecessary but you need the time and approval from your manager to do it. Watch this conversation between a developer and\nthe well known “pointy haired boss”, that I stole from\na stackexchange.com post\nand that seems awkwardly familiar to every developer:","blog/talking-tech-to-the-business-guy",[7906,24906,18709,24907],"communication","project-management","Every development project has a business guy attached, who holds the project money and makes the decisions what the team should implement. That guy can be your customer, sales manager,…","ez7W0LZkIxZszGVbDbxm6yhrWEZ66IYuD1ffECT1xCY",{"id":24911,"title":24912,"author":24913,"body":24914,"category":25342,"date":25343,"description":25344,"extension":617,"link":25345,"meta":25346,"navigation":499,"path":25347,"seo":25348,"slug":24918,"stem":25350,"tags":25351,"teaser":25353,"__hash__":25354},"blog/blog/client-code-ignores-repository-implementations-developers-do-not.md","Client code ignores REPOSITORY implementations; developers do not",[13434],{"type":11,"value":24915,"toc":25338},[24916,24919,24934,24940,24943,25007,25022,25031,25059,25063,25070,25078,25085,25105,25115,25156,25167,25180,25202,25232,25238,25244,25273,25287,25298,25305,25307,25312,25315,25318,25328,25333,25336],[14,24917,24912],{"id":24918},"client-code-ignores-repository-implementations-developers-do-not",[18,24920,24921,24922,24925,24926,24929,24930,24933],{},"Our team is working on an application for one of our clients, a service provider for container logistics, shipping cargo\nbetween seaports, terminals and other loading sites. The business domain also includes the calculation of shipping\nprices subjected to the agreements met between the shipping company and its customers. 
We recently implemented the\nconcept of so called ",[573,24923,24924],{},"offers"," into the application, whereas each offer contains multiple ",[573,24927,24928],{},"terminal-specific prices",".\nOne or more offers may be assigned to a ",[573,24931,24932],{},"customer"," (see diagram below, capturing these domain concepts).",[18,24935,24936],{},[2223,24937],{"alt":24938,"src":24939},"\"domainmodel\"","https://media.synyx.de/uploads//2013/12/domainmodel.png",[18,24941,24942],{},"Our technology stack encompasses Spring Framework and JPA as the persistence technology. All the applications data is\nstored in a relational database.",[43,24944,24946],{"className":288,"code":24945,"language":290,"meta":48,"style":48},"\n@Entity\npublic class Offer {\n @ManyToMany(fetch = FetchType.EAGER)\n private Map\u003C Long, Price> importBargePrices;\n @ManyToMany(fetch = FetchType.EAGER)\n private Map\u003C Long, Price> exportBargePrices;\n @ManyToMany(fetch = FetchType.EAGER)\n private Map\u003C Long, Price> importRailPrices;\n @ManyToMany(fetch = FetchType.EAGER)\n private Map\u003C Long, Price> exportRailPrices;\n ...\n}\n\n",[50,24947,24948,24952,24957,24962,24967,24972,24976,24981,24985,24990,24994,24999,25003],{"__ignoreMap":48},[53,24949,24950],{"class":55,"line":56},[53,24951,500],{"emptyLinePlaceholder":499},[53,24953,24954],{"class":55,"line":86},[53,24955,24956],{},"@Entity\n",[53,24958,24959],{"class":55,"line":126},[53,24960,24961],{},"public class Offer {\n",[53,24963,24964],{"class":55,"line":163},[53,24965,24966],{}," @ManyToMany(fetch = FetchType.EAGER)\n",[53,24968,24969],{"class":55,"line":186},[53,24970,24971],{}," private Map\u003C Long, Price> importBargePrices;\n",[53,24973,24974],{"class":55,"line":221},[53,24975,24966],{},[53,24977,24978],{"class":55,"line":242},[53,24979,24980],{}," private Map\u003C Long, Price> exportBargePrices;\n",[53,24982,24983],{"class":55,"line":273},[53,24984,24966],{},[53,24986,24987],{"class":55,"line":279},[53,24988,24989],{}," private Map\u003C Long, Price> importRailPrices;\n",[53,24991,24992],{"class":55,"line":496},[53,24993,24966],{},[53,24995,24996],{"class":55,"line":503},[53,24997,24998],{}," private Map\u003C Long, Price> exportRailPrices;\n",[53,25000,25001],{"class":55,"line":509},[53,25002,276],{},[53,25004,25005],{"class":55,"line":515},[53,25006,282],{},[18,25008,25009,25010,25013,25014,25017,25018,25021],{},"Note the ",[573,25011,25012],{},"collection-valued associations"," between offer and price: in the current implementation these have been\nconfigured to be ",[573,25015,25016],{},"eagerly"," fetched. Setting the fetch type to ",[573,25019,25020],{},"eager loading"," forces the JPA provider to instantly fetch\nthese entity attributes from the database when an offer is read.",[18,25023,25024,25025,25030],{},"Beyond JPA we also use ",[585,25026,25029],{"href":25027,"rel":25028},"http://projects.spring.io/spring-data/",[589],"Spring Data",". 
So, there’s a repository representing a\ncollection of offers and encapsulating the internal details of database access:",[43,25032,25034],{"className":288,"code":25033,"language":290,"meta":48,"style":48},"\npublic interface OfferRepository extends\n JpaRepository\u003C Offer, Long> {\n Offer findByLabel(String label);\n}\n\n",[50,25035,25036,25040,25045,25050,25055],{"__ignoreMap":48},[53,25037,25038],{"class":55,"line":56},[53,25039,500],{"emptyLinePlaceholder":499},[53,25041,25042],{"class":55,"line":86},[53,25043,25044],{},"public interface OfferRepository extends\n",[53,25046,25047],{"class":55,"line":126},[53,25048,25049],{}," JpaRepository\u003C Offer, Long> {\n",[53,25051,25052],{"class":55,"line":163},[53,25053,25054],{}," Offer findByLabel(String label);\n",[53,25056,25057],{"class":55,"line":186},[53,25058,282],{},[649,25060,25062],{"id":25061},"when-things-went-wrong","When things went wrong",[18,25064,25065,25066,25069],{},"Before deploying our application to production, we did some ",[573,25067,25068],{},"exploratory testing"," in a production-like environment.\nDuring testing, we realized that the application ran out of memory when doing one of the following operations:",[577,25071,25072,25075],{},[580,25073,25074],{},"loading an offer from the database (in order to display its details)",[580,25076,25077],{},"assigning an offer to a customer",[18,25079,25080,25081,25084],{},"These two use cases have been tested constantly by our acceptance tests, every time the ",[573,25082,25083],{},"Continuous Integration"," server\nfinished building and deploying the application into a test server. Nevertheless, both use cases now turn out to be\nbroken in a production-like environment. One obvious difference between the environment for CI acceptance testing and\nthe production-like environment is the amount of data stored in the database.",[18,25086,25087,25088,25093,25094,17307,25097,25100,25101,25104],{},"Java Virtual Machine monitoring and profiling\nwith ",[585,25089,25092],{"href":25090,"rel":25091},"http://docs.oracle.com/javase/7/docs/technotes/tools/share/jvisualvm.html",[589],"jvisualvm"," revealed huge memory\nconsumption when reconstituting a single stored offer entity using the repository method ",[50,25095,25096],{},"findOne",[50,25098,25099],{},"OfferRepository","\nextends ",[50,25102,25103],{},"JpaRepository"," and therefore provides the aforementioned method;",[18,25106,25107,25108,25111,25112,25114],{},"Spring Datas ",[50,25109,25110],{},"SimpleJpaRepository"," implements this method to simply call JPA EntityManagers ",[50,25113,15077],{}," method.",[43,25116,25118],{"className":288,"code":25117,"language":290,"meta":48,"style":48},"\n@Override\npublic T findOne(ID id) {\n ...\n Class\u003C T> domainType = getDomainClass();\n return type == null ? em.find(domainType, id)\n : em.find(domainType, id, type);\n}\n\n",[50,25119,25120,25124,25128,25133,25137,25142,25147,25152],{"__ignoreMap":48},[53,25121,25122],{"class":55,"line":56},[53,25123,500],{"emptyLinePlaceholder":499},[53,25125,25126],{"class":55,"line":86},[53,25127,14489],{},[53,25129,25130],{"class":55,"line":126},[53,25131,25132],{},"public T findOne(ID id) {\n",[53,25134,25135],{"class":55,"line":163},[53,25136,276],{},[53,25138,25139],{"class":55,"line":186},[53,25140,25141],{}," Class\u003C T> domainType = getDomainClass();\n",[53,25143,25144],{"class":55,"line":221},[53,25145,25146],{}," return type == null ? 
em.find(domainType, id)\n",[53,25148,25149],{"class":55,"line":242},[53,25150,25151],{}," : em.find(domainType, id, type);\n",[53,25153,25154],{"class":55,"line":273},[53,25155,282],{},[18,25157,25158,25159,25162,25163,25166],{},"Analyzing the SQL that gets constructed and executed by the JPA provider discloses a relatively big number of ",[573,25160,25161],{},"outer\njoins"," in the generated ",[573,25164,25165],{},"select"," statement; this is not an issue with Spring Data.",[11259,25168,25169,25172,25175],{},[18,25170,25171],{},"“Spring Data JPA itself does not control the interaction with the database directly. All it does is interacting with\nthe EntityManager, so effectively all behavioral effects are defined by JPA and",[18,25173,25174],{},"the underlying OR-mapper.”",[18,25176,25177],{},[53,25178,25179],{},"Oliver Gierke, Spring Data Project",[18,25181,25182,25183,25186,25187,25189,25190,25192,25193,25195,25196,25198,25199,25201],{},"Introducing the finder method ",[50,25184,25185],{},"findById"," into ",[50,25188,25099],{}," and replacing calls to ",[50,25191,25096],{}," by calls to ",[50,25194,25185],{},"\nhad an effect on the generated SQL: instead of one ",[573,25197,25165],{}," statement with more than twenty outer joins, we now have\nmultiple ",[573,25200,25165],{}," statements each having three outer joins at most.",[43,25203,25205],{"className":288,"code":25204,"language":290,"meta":48,"style":48},"\npublic interface OfferRepository extends\n JpaRepository\u003C Offer, Long> {\n Offer findById(Long id);\n Offer findByLabel(String label);\n}\n\n",[50,25206,25207,25211,25215,25219,25224,25228],{"__ignoreMap":48},[53,25208,25209],{"class":55,"line":56},[53,25210,500],{"emptyLinePlaceholder":499},[53,25212,25213],{"class":55,"line":86},[53,25214,25044],{},[53,25216,25217],{"class":55,"line":126},[53,25218,25049],{},[53,25220,25221],{"class":55,"line":163},[53,25222,25223],{}," Offer findById(Long id);\n",[53,25225,25226],{"class":55,"line":186},[53,25227,25054],{},[53,25229,25230],{"class":55,"line":221},[53,25231,282],{},[18,25233,25234,25235,25237],{},"In both cases, because the offers collection-valued associations are configured to be eagerly loaded, price data is\nimplicitly fetched from the database when an offer is initially read. And in case of using ",[50,25236,25096],{},", the generated SQL\nstatement causes the application to crash with an OutOfMemoryError.",[18,25239,25240,25241,25243],{},"But just replacing any call of ",[50,25242,25096],{}," falls too short. 
Look at the following code that assigns the specified offer to\na customer:",[43,25245,25247],{"className":288,"code":25246,"language":290,"meta":48,"style":48},"\nOffer offer = offerRepository.findById(offerId);\nCustomer customer = customerRepository.findById(customerId);\ncustomer.addOffer(offer);\ncustomerRepository.save(customer);\n\n",[50,25248,25249,25253,25258,25263,25268],{"__ignoreMap":48},[53,25250,25251],{"class":55,"line":56},[53,25252,500],{"emptyLinePlaceholder":499},[53,25254,25255],{"class":55,"line":86},[53,25256,25257],{},"Offer offer = offerRepository.findById(offerId);\n",[53,25259,25260],{"class":55,"line":126},[53,25261,25262],{},"Customer customer = customerRepository.findById(customerId);\n",[53,25264,25265],{"class":55,"line":163},[53,25266,25267],{},"customer.addOffer(offer);\n",[53,25269,25270],{"class":55,"line":186},[53,25271,25272],{},"customerRepository.save(customer);\n",[18,25274,25275,25276,25279,25280,25283,25284,986],{},"First both, the offer and the customer (each specified by Id) are queried from the database. Then, the offer is assigned\nto the customer and finally the customer gets updated. A programmer not aware of JPA and Spring Data internals might\noverlook that ",[50,25277,25278],{},"save",", which is actually provided by Spring Data, invokes the JPA EntityManagers ",[50,25281,25282],{},"merge"," method and he\nmight not know the details of the ",[27,25285,25286],{},"JPA concepts of detachment, merging and transaction-scoped persistence contexts",[18,25288,25289,25290,25293,25294,25297],{},"Now, assume that retrieving the offer and customer and eventually updating the customer are executed not within the same\n",[573,25291,25292],{},"transaction",". This means that the offer entity is no longer in managed state when the JPA merge operation does its\nwork. JPA ",[573,25295,25296],{},"persistence contexts"," are tied to the lifecycle of a transaction which implies that the JPA provider again\nreads the offer from database, instantly fetching all of its collection-valued associations, which in our case causes\nthe application to run out of memory (again, there are lots of outer joins in the select statement sent to the\ndatabase).",[18,25299,25300,25301,25304],{},"Ensuring that reading offers and updating the customer are handled in the same transaction fixes the memory issue, but\nthe better approach would have been to avoid eager loading at all. This not only decreases the impact on memory but also\nreduces the amount of SQL and speeds up the queries and object loading. These are the reasons that JPA defaults to ",[573,25302,25303],{},"lazy\nloading"," (aka deferred loading, on-demand fetching) for collection-valued associations; but keep in mind that lazy\nloading is just a hint to the JPA provider, i.e. behaviour will depend on its implementation.",[649,25306,12384],{"id":12383},[18,25308,25309],{},[27,25310,25311],{},"Deepen the knowledge of the technologies you are using",[18,25313,25314],{},"Care about the internal details of the persistence technology. 
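To make this lesson concrete, here is a minimal sketch (not the project’s actual code) that combines the two remedies discussed above: the collection-valued associations are left at JPA’s lazy default, and the whole assignment use case runs inside a single transaction, so the offer never becomes detached before the save. The service class name OfferAssignmentService is made up for illustration; Offer, Price, Customer and the repositories are the ones from the snippets above.

```java
import java.util.Map;

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.ManyToMany;

import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

// Offer.java (excerpt) – keep the collection-valued associations at the lazy
// default, so the price maps are only fetched when they are actually accessed.
@Entity
public class Offer {

    @ManyToMany(fetch = FetchType.LAZY)
    private Map<Long, Price> importBargePrices;

    // ... the other price maps analogous
}

// OfferAssignmentService.java (illustrative name) – one transaction-scoped
// persistence context for the whole use case: the offer stays managed, so
// save() does not have to merge a detached entity and re-read it (with all
// of its associations) from the database.
@Service
public class OfferAssignmentService {

    private final OfferRepository offerRepository;
    private final CustomerRepository customerRepository;

    public OfferAssignmentService(OfferRepository offerRepository,
                                  CustomerRepository customerRepository) {
        this.offerRepository = offerRepository;
        this.customerRepository = customerRepository;
    }

    @Transactional
    public void assignOfferToCustomer(Long offerId, Long customerId) {
        Offer offer = offerRepository.findById(offerId);
        Customer customer = customerRepository.findById(customerId);
        customer.addOffer(offer);
        customerRepository.save(customer);
    }
}
```

Lazy loading alone already avoids the huge eager select; the shared transaction boundary additionally prevents the merge-triggered re-read described above.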
Generally speaking, decoupling clients from repository\nimplementation and the underlying technology is great, but this does not relieve the developers from the need to\nunderstand the consequences of using encapsulated behaviour, including its performance and resource-related\nimplications.",[18,25316,25317],{},"In other words:",[11259,25319,25320,25323],{},[18,25321,25322],{},"“Client code ignores REPOSITORY implementations; developers do not”",[18,25324,25325],{},[53,25326,25327],{},"Eric Evans, Domain-Driven Design",[18,25329,25330],{},[27,25331,25332],{},"Measure, don’t guess",[18,25334,25335],{},"Testing the application should not only be done on a regular basis and in an automated fashion, but equally important,\nperformance measuring and memory monitoring in production-like environments is essential and must not be ignored. In an\niterative development process, these tests should be done in each iteration.",[607,25337,989],{},{"title":48,"searchDepth":86,"depth":86,"links":25339},[25340,25341],{"id":25061,"depth":126,"text":25062},{"id":12383,"depth":126,"text":12384},[613],"2013-12-28T15:21:04","Our team is working on an application for one of our clients, a service provider for container logistics, shipping cargo\\nbetween seaports, terminals and other loading sites. The business domain also includes the calculation of shipping\\nprices subjected to the agreements met between the shipping company and its customers. We recently implemented the\\nconcept of so called offers into the application, whereas each offer contains multiple terminal-specific prices.\\nOne or more offers may be assigned to a customer (see diagram below, capturing these domain concepts).","https://synyx.de/blog/client-code-ignores-repository-implementations-developers-do-not/",{},"/blog/client-code-ignores-repository-implementations-developers-do-not",{"title":24912,"description":25349},"Our team is working on an application for one of our clients, a service provider for container logistics, shipping cargo\nbetween seaports, terminals and other loading sites. The business domain also includes the calculation of shipping\nprices subjected to the agreements met between the shipping company and its customers. We recently implemented the\nconcept of so called offers into the application, whereas each offer contains multiple terminal-specific prices.\nOne or more offers may be assigned to a customer (see diagram below, capturing these domain concepts).","blog/client-code-ignores-repository-implementations-developers-do-not",[25352,1011,9417],"jpa","Our team is working on an application for one of our clients, a service provider for container logistics, shipping cargo between seaports, terminals and other loading sites. The business domain…","BRuzgHD8_0uoriwcr4dYqNX0E6AtuiiaImID2mFvZ_g",{"id":25356,"title":25357,"author":25358,"body":25359,"category":25576,"date":25577,"description":48,"extension":617,"link":25578,"meta":25579,"navigation":499,"path":25580,"seo":25581,"slug":25363,"stem":25582,"tags":25583,"teaser":25584,"__hash__":25585},"blog/blog/nosql-matters-it-does-but-think-about-your-data.md","NoSQL matters – It does! 
But think about your data!",[19894],{"type":11,"value":25360,"toc":25567},[25361,25364,25368,25380,25384,25387,25391,25394,25398,25428,25455,25458,25462,25469,25475,25485,25491,25497,25503,25509,25516,25520],[14,25362,25357],{"id":25363},"nosql-matters-it-does-but-think-about-your-data",[2207,25365,25367],{"id":25366},"the-confernence-venue","The confernence venue",[18,25369,10847,25370,25372,25373,25375,25376,25379],{},[27,25371,19907],{}," took place in Barcelona, Spain, from 29-30 November. Barcelona is a big, beautiful (\nbut crowded) city. The conference venue, the ",[573,25374,19913],{},", belongs to the complex of the ",[573,25377,25378],{},"Hospital de la Santa\nCreu i Sant Pau"," which was declared World Cultural Heritage Site by the UNESCO. It has a great atmosphere! The\nconference itself was sold out, and thus more than 150 participants came together to discuss about the field of NoSQL\nand related technologies. It was well organized, and the schedule left time for discussions and to change the rooms. The\nconcluding ‘session’ brought all the participants together for tapas and beer and encouraged them for further lively\ndiscussions.",[2207,25381,25383],{"id":25382},"day-1-the-training-day","Day 1: The training day",[18,25385,25386],{},"The first day was a training day. The training sessions were concerned with some of the available NoSQL stores (the\ngraph database Neo4j or the key-value store Riak) or how to model in a NoSQL world. I myself did not participate in the\ntraining sessions, as I arrived late on Friday 29 November in Barcelona.",[2207,25388,25390],{"id":25389},"day-2-the-session-day","Day 2: The session day",[18,25392,25393],{},"The second day of the conference was a session day with talks covering all the different nuances of the NoSQL world. It\nwas a very interesting day, with lots of things to learn and with lots of opportunities to discuss the presented topics.\nI will not report on all the talks I visited in detail, but will only briefly summarize them. But before that, I will\npoint some of the points that I (for myself) found most interesting.",[649,25395,25397],{"id":25396},"the-gist-think-about-your-data","The gist: Think about your data",[18,25399,25400,25401,25404,25405,11792,25410,25404,25413,25418,25419,25424,25425,25427],{},"A main concern of many speakers was to make developers think about their applications (again): What do these demand, and\nwhat are the best fitting solutions and architectures! Not ‘",[573,25402,25403],{},"one-size-fits-all","‘ ",[573,25406,25407,1073],{},[53,25408,25409],{},"Stonebraker 2005",[573,25411,25412],{},"‘there\nis no free lunch with distributed data",[573,25414,25415],{},[53,25416,25417],{},"HP 2005","! To efficiently use the full potential the new stores and\ntechnologies offer, a basic understanding of the database developments of the last about 50 years is helpful. Starting\nfrom hierarchical and network CODASYL systems, Codds abstraction to tuples was a big step forward ",[573,25420,25421],{},[53,25422,25423],{},"Codd 1970",".\nApplications were freed from caring how data is represented on the storage system, the processes of normalization avoids\nredundancy and anomalies, and the ",[27,25426,22088],{}," properties were desired in most business applications. 
Further, the\nmathematically precise definition of the tuple calculus made the theory formally accessible by set theory.",[18,25429,25430,25431,25433,25434,99,25439,25444,25445,25450,25451,25454],{},"But the world is in constant change: High availability and horizontal scaling are crucial points in todays business (\nweb) applications, and consistency might sometimes be weakened in these environments, so ",[27,25432,21993],{}," can be an appropriate\nalternative consistency model to the ‘good old’ ACID. With BASE come the phenomena the CAP theorem states ",[573,25435,25436],{},[53,25437,25438],{},"Brewer\n2002",[573,25440,25441],{},[53,25442,25443],{},"Gilbert 2002",". Some speakers warned: Keep in mind that relational database systems can be a perfectly sound\nchoice! To design suitable applications and to make use of the powerful possibilities of ‘polyglot persistence’\n",[573,25446,25447],{},[53,25448,25449],{},"Fowler 2010"," and the ‘Lambda architecture for big data’ ",[53,25452,25453],{},"Marz 2012",", the needs of the application have to be well\nunderstood, otherwise a sound choice from the high number of database stores is impossible.",[18,25456,25457],{},"Complexity isolation, i. e. partitioning the demands in small parts and address these, can be a good way to achieve good\nsoftware. Schemas are often desirable, as they introduce structure in your data based on explicit knowledge that can be\nexploited to maintain a systems’ integrity.",[649,25459,25461],{"id":25460},"the-talks","The talks",[18,25463,25464,25465,25468],{},"The conferences great keynote was given by ",[27,25466,25467],{},"N. Marz",". He introduced the ‘Doofus Programmer’ (that we all are) and\ndiscussed the ‘insanity’ of the complexity of the database world. His proposal: To go for ‘Human-Fault-Tolerant’\nsystems by using immutable (but versioned) databases, schemas, precomputation, complexity isolation and the Lambda\narchitecture.",[18,25470,25471,25474],{},[27,25472,25473],{},"C. Gormley"," introduced Elasticsearch and its possibilities in detail, showing many examples that demonstrated\npowerful indexing options. Elasticsearch is a distributed document store, capable to achieve near-real-time data\nanalysis.",[18,25476,25477,25478,25481,25482,25484],{},"The talk by ",[27,25479,25480],{},"M. Hausenblas"," was about the ",[573,25483,19951],{}," and how to use NoSQL technologies to harness it. The\namount of data collected on a daily basis is huge. Pro-active (automotive) services, optimization of (logistic)\nprocesses, patient monitoring and smart houses and cities are just a few examples. The key technologies to cope with the\ndata can be polyglot persistence and the Lambda architecture. Use different stores for different needs!",[18,25486,25487,25490],{},[27,25488,25489],{},"D. Turnbull"," (a historian and computer scientist) presented a nice historical review on database evolution, starting\nfrom hierarchical and network systems that ‘evolved’ to NoSQL. He discussed the needs that led to the changes in data\nmodeling.",[18,25492,25493,25496],{},[27,25494,25495],{},"U. Friedrichsen"," nicely illustrated the pitfalls of modeling in a BASE world, not with the aim to scare developers,\nbut to enforce them to think about data and consistency properties. Different consistency modes need different\napproaches, and again: The best fitting store matching the applications demands has to be found! 
He presented code\nexamples that demonstrated how to deal with some of the BASE ‘phenomena’, and how to achieve consistency properties like\n‘read-your-own-writes’.",[18,25498,25499,25502],{},[27,25500,25501],{},"D. Mytton",", founder of Server density, showed an example of how a replicated, fast out-of-the-box fault tolerant\ndatabase store can be build up with MongoDB. The solution is used for time series analysis and able to serve up to 3333\nwrites/s with fast response time.",[18,25504,25505,25508],{},[27,25506,25507],{},"J. Reijn"," showed how real-time visitor analysis can be achieved by combining Elasticsearch with Couchbase as part of\nHippo CMS. He also gave a nice example for choosing stores according to needs: As part of the system Apache Jackrabbit\nis used, which is a hierarchical (key/value) store.",[18,25510,25511,25512,25515],{},"A scale-in approach to databases was presented by ",[27,25513,25514],{},"N. Björkman",". As hardware and especially RAM are relatively cheap\navailable these days, memory centric databases are possible. By holding database and applications in the same RAM (and\nletting them share heap space), performance can be significantly increased, and object mapping can be completely\navoided. The system Starcounter offers full ACID compliance, a native .NET API and SQL support.",[2207,25517,25519],{"id":25518},"some-recommended-articles","Some recommended articles",[577,25521,25522,25527,25532,25537,25543,25548,25558],{},[580,25523,25524,25526],{},[53,25525,25409],{}," Stonebraker, Çetintemel; “One Size Fits All”: An Idea Whose Time Has Come and Gone, Proceedings of\nthe 21st International Conference on Data Engineering ICDE ’05, IEEE Computer Society, 2005.",[580,25528,25529,25531],{},[53,25530,25417],{}," HP; There is no free lunch with distributed data white paper, 2005.",[580,25533,25534,25536],{},[53,25535,25423],{}," Codd; Relational Model of Data for Large Shared Data Banks, Communications of the ACM 13 : 6, 1970.",[580,25538,25539,25542],{},[53,25540,25541],{},"Brewer 2000"," Brewer; Towards Robust Distributed Systems, Keynote at PODC, 2000.",[580,25544,25545,25547],{},[53,25546,25443],{}," Gilbert, Lynch; Brewer’s Conjecture and the Feasability of Consitent, Available and\nPartition-Tolerant Web Services, ACM SIGACT News 23:2, 2002.",[580,25549,25550,25552,25553,986],{},[53,25551,25449],{},"\nFowler; ",[585,25554,25557],{"href":25555,"rel":25556},"http://www.martinfowler.com/bliki/PolyglotPersistence.html",[589],"http://martinfowler.com/bliki/PolyglotPersistence.html",[580,25559,25560,25562,25563,986],{},[53,25561,25453],{}," Marz; Big Data – Principles and best practices of scalable realtime data systems, Early access\nedition, ",[585,25564,25565],{"href":25565,"rel":25566},"http://manning.com/marz",[589],{"title":48,"searchDepth":86,"depth":86,"links":25568},[25569,25570,25571,25575],{"id":25366,"depth":86,"text":25367},{"id":25382,"depth":86,"text":25383},{"id":25389,"depth":86,"text":25390,"children":25572},[25573,25574],{"id":25396,"depth":126,"text":25397},{"id":25460,"depth":126,"text":25461},{"id":25518,"depth":86,"text":25519},[613],"2013-12-04T18:46:02","https://synyx.de/blog/nosql-matters-it-does-but-think-about-your-data/",{},"/blog/nosql-matters-it-does-but-think-about-your-data",{"title":25357,"description":48},"blog/nosql-matters-it-does-but-think-about-your-data",[22431,22435,23189],"The confernence venue The NoSQL matters conference took place in Barcelona, Spain, from 29-30 November. Barcelona is a big, beautiful (but crowded) city. 
The conference venue, the Casa Convalescència, belongs…","dKIi4VvIzPDp2qWRAcFIpHOANb0kdNp85J5KG52F5Xc",{"id":25587,"title":25588,"author":25589,"body":25591,"category":25661,"date":25662,"description":25663,"extension":617,"link":25664,"meta":25665,"navigation":499,"path":25666,"seo":25667,"slug":25595,"stem":25668,"tags":25669,"teaser":25674,"__hash__":25675},"blog/blog/all-aboard-the-google-train-das-gdg-devfest-2013-in-karlsruhe.md","All aboard the Google train – Das GDG DevFest 2013 in Karlsruhe",[25590],"kraft",{"type":11,"value":25592,"toc":25659},[25593,25596,25599,25605,25608,25614,25617,25620,25623,25629,25632,25638,25641,25644,25647,25653,25656],[14,25594,25588],{"id":25595},"all-aboard-the-google-train-das-gdg-devfest-2013-in-karlsruhe",[18,25597,25598],{},"Die Google Developer Group lud nach dem sehr gut besuchten letztjährigen DevFest auch in diesem Jahr wieder alle\nInteressierten in die Duale Hochschule Karlsruhe (ehemals Berufsakademie) ein, um einen mit spannenden Vorträgen und\nWorkshops gespickten Tag ganz im Zeichen der Google Produkte zu erleben, und hoffentlich viel Interessantes und Neues zu\nentdecken und zu lernen.",[18,25600,25601],{},[2223,25602],{"alt":25603,"src":25604},"\"Der synyx Sponsorentisch auf dem GDG DevFest 2013\"","https://media.synyx.de/uploads//2013/11/tisch.jpg",[18,25606,25607],{},"Wie auch im letzten Jahr, war synyx auch dieses Mal wieder als \u0004S\u0005ponsor tätig und natürlich auch vor Ort präsent um\nFragen rund um unsere Projekte, Firma, Arbeitsweisen und vieles mehr zu beantworten. Los ging das ganze Samstags morgens\nbereits um 8:30 Uhr mit der Anmeldung und Registrierung der Teilnehmer. Trotz der doch sehr frühen Stunde traf das\nhauptsächlich aus Studenten bestehende Publikum recht zügig ein, versorgte sich an der Anmeldung mit Namensschildchen\nund T-Shirts.",[18,25609,25610],{},[2223,25611],{"alt":25612,"src":25613},"\"Eines der vielen Gespräche mit einer an synyx und unserem Geschäftsmodell interessierten Person.\"","https://media.synyx.de/uploads//2013/11/gespraech.jpg",[18,25615,25616],{},"Bereits vor Beginn der eigentlichen Veranstaltung um 9:15 Uhr war schon Zeit für erste Gespräche mit interessierten\nTeilnehmern. Erfreulicherweise hatten viele der Teilnehmer das grüne synyx-S bereits mehrfach gesehen und wir wurden\nmehrfach darauf angesprochen, dass es toll sei, dass synyx gerade hier in Karlsruhe immer wieder als einer der Sponsoren\nzu sehen sei, die solche für die Teilnehmer kostenlosen Veranstaltungen ermöglichen. Das ist natürlich immer erfreulich\nzu hören, wenn einem solch ein positives Feedback zu den eigenen Marketing- und Sponsoring-Aktionen entgegenschlägt.",[18,25618,25619],{},"Bereits ganz am Anfang zeigten sich viele der Teilnehmer ganz angetan von den grünen Anti-Stress-Bällen, welche wir\nzwar eigentlich nicht mehr offiziell „im Programm“ haben, aber auch die noch im Lager befindlichen Restbestände fanden\nhier auf dem DevFest reißenden Absatz.",[18,25621,25622],{},"Um 9:15 Uhr ging es dann nach kurzen, wohl bei solchen Veranstaltungen immer wieder unvermeidlichen, technischen\nProblemen mit der Soundanlage auch wirklich wie geplant mit der Begrüßung aller Teilnehmer los. Die Veranstaltung war ja\nauf maximal 200 Teilnehmer begrenzt, relativ schnell ausgebucht und die Warteliste lang. Leider haben es trotzdem über\n40 Personen vorgezogen ohne Abmeldung einfach nicht zu erscheinen. 
Das ist wohl die „Kostet ja nichts, egal!“ Mentalität\nheutzutage.",[18,25624,25625],{},[2223,25626],{"alt":25627,"src":25628},"\"Unsere Anti-Stress-Bälle fanden reißenden Absatz.\"","https://media.synyx.de/uploads//2013/11/balls.jpg",[18,25630,25631],{},"Die Keynote über das Problem des Internet der Dinge wurde dann auch bereits ein großer Erfolg mit vielen Lachern und\neinigen „Wow!“ Elementen, als Kai Kreuzer anhand praktischer Beispiele im Bereich Wohnraumautomation gezeigt hat, was\nheutzutage mit wenigen Handgriffen und mit einer Handvoll Code im Controller alles möglich ist. Sein Showcase war dabei\neine RGB LED-Lampe. Die er von der Standard-Fernsteuerung nach und nach umgebaut und erweitert hat. Vom kabellosen\nLichtschalter, welcher die Sendeenergie aus dem Tastendruck selbst bezieht bis hin zur 3D \u0004Gestensteuerung\u0005 zum Schalten\ndes Lichts und zum Farbwechsel war alles dabei und hat doch einige erstaunte Gesichter hervorgerufen.",[18,25633,25634],{},[2223,25635],{"alt":25636,"src":25637},"\"Unsere tollen synyx Kugelschreiber und die Flyer\"","https://media.synyx.de/uploads//2013/11/flyer.jpg",[18,25639,25640],{},"In der ersten Kaffeepause kam dann der große Run an die Sponsorentische. Wir kamen gar nicht mehr hinterher mit dem\nNachlegen von Anti-Stress-Bällen, Kugelschreibern, Schlüsselbändern und Flyern. Aber dafür haben wir diese ja auch\nmitgebracht. Man ließ sich belegte Brötchen und Kaffee schmecken und bereitete sich auf den ersten beiden Vorträge mit\nden Themen Google Maps sowie JavaScript und HTML 5 vor. Während dieser Vorträge nutzten einige Teilnehmer die\nGelegenheit, uns detailliert über unsere Projekte, Vorgehensweisen, Philosophie und natürlich über die Jobmöglichkeiten\nbei synyx auszufragen. Wir hätten locker direkt 10 neue Kollegen vor Ort rekrutieren können.",[18,25642,25643],{},"Passend zur Mittagspause wurde die bereits eingangs in der Keynote gezeigte Gestensteuerung mit Hilfe einer Leap Motion\nals Showcase mit Hilfe von Google Earth und einem großen Beamer aufgebaut. Hier war es dann möglich, mit Hilfe von\nHandbewegungen innerhalb Google Earth zu zoomen, zu scrollen, den Globus zu drehen, den Kameraneigungswinkel zu\nverstellen und vieles mehr. Durch die hohe Anzahl der möglichen Freiheitsgrade in dem System war es gar nicht so\neinfach, die Kamera da hin zu steuern, wo man wollte. Aber zwei, drei Minuten Übung haben ausgereicht, die Kamera nach\nAbschluss seines Rundflugs um die Welt wieder zielsicher auf die Duale Hochschule auszurichten, auch wenn sich einige\ndann doch bei Tauchgängen im Pazifik völlig verirrt hatten.",[18,25645,25646],{},"Nach dem Mittagessen war dann Workshop-Zeit. Hierzu gab es drei unterschiedliche Tracks. Der erste Track widmete sich\nhauptsächlich der Webprogrammierung mit GWT und jQuery und war relativ visualisierungslastig. Im zweiten Track drehte\nsich alles um das Smartphone-Betriebssystem Android und im Rahmen eines CodeLabs wurde hier eine Einführung in die Welt\nder mobilen Programmierung gegeben. Der dritte Track war mit der Google App Engine ganz den Cloud-Diensten von Google\ngewidmet. 
Auch hier wurde fleißig direkt am „lebenden“ Beispiel gezeigt, was alles möglich ist.",[18,25648,25649],{},[2223,25650],{"alt":25651,"src":25652},"\"Der komplette synyx Sponsorenstand mit Chef\"","https://media.synyx.de/uploads//2013/11/stand.jpg",[18,25654,25655],{},"Bei der zweite Kaffeepause war dann ein großes Hallo unter den Teilnehmern, denn es gab Schokoriegel für alle und zwar\nin einer so großen Menge, dass selbst Abends noch welche übrig waren obwohl die meisten gleich zwei oder drei mit in den\nzweiten Teil der Workshops genommen haben, um das 16 Uhr Loch gut zu überstehen. Auch die große „Unboxing“ Aktion, bei\nder ein riesiges Paket voller Google-Giveaways ausgepackt wurde, fand großen Anklang bei den Teilnehmern. Manch einer\nkonnte nach einem wilden Goodies-in-die-brodelnde-Menge-werfen tatsächlich nun 4 neue T-Shirts sein eigen nennen.",[18,25657,25658],{},"Nach Abschluss des zweiten Teils der Workshops gab es dann zum Abschluss noch ein kleines Fest im Foyer der dualen\nHochschule mit Musik und vor allem Bier für alle. Hier nutzten zahlreiche Teilnehmer die Gelegenheit noch einmal mit dem\nSpeakern und Organisatoren vertiefende Gespräche zu führen, bevor dann um 19 Uhr ein gelungender und sehr informativer\nAbend zu Ende ging.",{"title":48,"searchDepth":86,"depth":86,"links":25660},[],[613,996],"2013-11-26T15:53:29","Die Google Developer Group lud nach dem sehr gut besuchten letztjährigen DevFest auch in diesem Jahr wieder alle\\nInteressierten in die Duale Hochschule Karlsruhe (ehemals Berufsakademie) ein, um einen mit spannenden Vorträgen und\\nWorkshops gespickten Tag ganz im Zeichen der Google Produkte zu erleben, und hoffentlich viel Interessantes und Neues zu\\nentdecken und zu lernen.","https://synyx.de/blog/all-aboard-the-google-train-das-gdg-devfest-2013-in-karlsruhe/",{},"/blog/all-aboard-the-google-train-das-gdg-devfest-2013-in-karlsruhe",{"title":25588,"description":25598},"blog/all-aboard-the-google-train-das-gdg-devfest-2013-in-karlsruhe",[25670,25671,25672,25673,6885],"gdg","gdg-devfest","google","sponsoring","Die Google Developer Group lud nach dem sehr gut besuchten letztjährigen DevFest auch in diesem Jahr wieder alle Interessierten in die Duale Hochschule Karlsruhe (ehemals Berufsakademie) ein, um einen mit…","96xZVpDlY5yDG8ur6OQcVNReUJAMERf7-dMqShPKET8",{"id":25677,"title":25678,"author":25679,"body":25680,"category":25788,"date":25789,"description":25790,"extension":617,"link":25791,"meta":25792,"navigation":499,"path":25793,"seo":25794,"slug":25684,"stem":25796,"tags":25797,"teaser":25800,"__hash__":25801},"blog/blog/synyx-bei-den-xp-days-2013.md","synyx bei den XP Days 2013",[18598],{"type":11,"value":25681,"toc":25786},[25682,25685,25694,25697,25706,25715,25727,25736,25745,25753,25756,25765,25774,25783],[14,25683,25678],{"id":25684},"synyx-bei-den-xp-days-2013",[18,25686,25687,25688,25693],{},"Vom 14. bis 16. November fanden in Karlsruhe die ",[585,25689,25692],{"href":25690,"rel":25691,"title":25692},"http://xpdays.de/2013/",[589],"XP Days"," statt und von synyx\nwaren (inklusive mir) vier Mitarbeiter dort. Da keiner von uns den Open Space am Samstag besuchte, gebe ich hier nur ein\nkurzes persönliches Fazit zu den ersten beiden Konferenztagen.",[18,25695,25696],{},"Ich bin bei synyx sowohl als Scrum Master als auch als Entwickler in Projekten tätig. 
Daher muss ich für mich persönlich\nauch bei Fortbildungen immer zwischen reinen Prozess Themen, die mich in meiner Arbeit als Scrum Master weiter bringen\nund Themen aus der Softwareentwicklung abwägen. Für mich waren die XP Days eine perfekte Mischung aus diesen beiden\nBereichen. Zumindest hatte ich die Möglichkeit die angebotenen Vorträge und Workshops an den beiden Konferenztagen so\nzusammen zu stellen, dass ich in beiden für mich relevanten Themenbereichen wertvolle neue Erkenntnisse gewinnen konnte.",[18,25698,25699,25700,25705],{},"Die Konferenz began für mich mit dem\nVortrag ",[585,25701,25704],{"href":25702,"rel":25703},"http://www.xpdays.de/2013/sessions/079-kreative-retrospektiven.html",[589],"Kreative Retrospektiven"," von Pierluigi\nPugliese, der für mich sehr interessant war, da Retrospektiven bei uns bisher immer nach dem gleichen Muster abliefen.\nObwohl hier keine konkreten Beispiele geliefert wurden, wie man Retrospektiven kreativer gestalten kann (mit dem Hinweis\nauf entsprechende Fachliteratur), lieferte Herr Pugliese doch einen sehr guten Überblick über das Thema und gab vor\nallem zahlreiche Begründungen, warum es in vielen Situationen sinnvoll sein kann, den Ablauf von Retrospektiven zu\nvariieren.",[18,25707,25708,25709,25714],{},"Weiter ging es\nmit ",[585,25710,25713],{"href":25711,"rel":25712},"http://www.xpdays.de/2013/sessions/049-pair-programming-mythbusters.html",[589],"Pairprogramming Mysthbusters",". Die Mythen\nwelche Martin Ruprecht sich vornahm zu beseitigen, sind bei synyx meiner Meinung nach kein Thema (mehr), da wir\nPairprogramming schon länger erfolgreich einsetzen und dies auch von den meisten Kunden als Vorteil gesehen wird.\nTrotzdem konnte ich aus diesem Vortrag einige interessante Denkanstöße mitnehmen, wie man die Arbeit im Pair noch weiter\nverbessern kann.",[18,25716,25717,25722,25726],{},[585,25718,25721],{"href":25719,"rel":25720},"http://www.xpdays.de/2013/sessions/019-ich-will-kein-agiler-coach-mehr-sein-und-du.html",[589],"„",[585,25723,25725],{"href":25719,"rel":25724},[589],"Ich will kein Agiler Coach mehr sein – Und du?“",".\nDieser von Johannes Link sehr provokant gewählte Vortragstitel machte mich neugierig. 
Einige seiner Argumente, wie etwa,\ndass der agile Ansatz nicht mehr als ein Placebo wäre, klangen sehr schlüssig und zeigten mir, dass man viele\nRichtlinien, die uns die agile Theorie vorgibt, auch kritisch nach ihrem tatsächlichen Nutzen hinterfragen kann.\nLetztendlich überzeugte er mich mit seiner Botschaft zwar nicht, sorgte aber für eine Horizonterweiterung, die mich\neinige der noch folgenden Vorträge auch immer mit einem anderen Ohr hören ließ.",[18,25728,25729,25730,25735],{},"Am Nachmittag überzeugte mich Martin Klose mit seinem\nVortrag ",[585,25731,25734],{"href":25732,"rel":25733},"http://www.xpdays.de/2013/sessions/100-code-retreat-behind-the-scenes.html",[589],"Code Retreat – behind the scenes","\ndavon, dass wir sowas unbedingt auch mal bei synyx machen müssen, um Entwickler Know-How besser zwischen den einzelnen\nTeams zu verteilen.",[18,25737,25738,25739,25744],{},"Als letzte Veranstaltung des Tages hörte ich mir noch\ndie ",[585,25740,25743],{"href":25741,"rel":25742},"http://www.xpdays.de/2013/sessions/052-kanban-nicht-yet-another-development-process.html",[589],"Einführung zu Kanban"," von\nFlorian Eisenberg an, um mir zum einen Anregungen für meine Arbeit mit unserem Admin Team zu holen und zum anderen Ideen\nzu sammeln, inwiefern Kanban eine Alternative zu Scrum im Softwareentwicklungsprozess darstellen kann. Eine wichtige\nErkenntnis aus diesem Vortrag war für mich, dass Scrum sich in erster Linie durch seine strengeren Regeln und seine\nstrenge Definition als Framework von Kanban unterscheidet und dadurch vor allem in Situationen sinnvoll sein kann, in\ndenen radikale Veränderungen durchgeführt werden sollen. Ob das in unseren Projekten notwendig oder vielleicht eher\nschädlich ist bin ich mir nicht so sicher.",[18,25746,15622,25747,25752],{},[585,25748,25751],{"href":25749,"rel":25750},"http://www.xpdays.de/2013/sessions/keynote-dan-north-why-agile-doesnt-scale-and-what-you-can-do-about-it.html",[589],"Keynote","\nam folgenden Tag begeisterte mich vor allem schon durch den hervorragenden Vortragsstil von Dan North. Auch inhaltlich\nwar der Vortrag sehr interessant, nur konnte ich hier leider wenig für meine tägliche Arbeit mitnehmen. Trotzdem bleibt\nes der aus meiner Sicht beste Vortrag den ich an den zwei Konferenztagen gehört habe.",[18,25754,25755],{},"Die bisher beschriebenen Vorträge stellten für mich den Prozess-technischem Teil der Konferenz dar. Zusätzlich zu\ndiesen Vorträgen besuchte ich drei Workshops, in denen die Teilnehmer anhand von konkreten Übungen ihre\nProgrammierkenntnisse erweitern konnten.",[18,25757,25758,25759,25764],{},"Der erste Workshop war für mich gleichzeitig der, welcher mir am meisten gebracht\nhat: ",[585,25760,25763],{"href":25761,"rel":25762},"http://www.xpdays.de/2013/sessions/060b-object-calisthenics-objekt-gymnastik-praxis.html",[589],"Object Calisthenics"," mit\nFranziska Sauerwein und David Burkhart. Im Workshop sollte ein einfaches Programm zur Darstellung eines Tennisspiels im\nPair so refactored werden, dass zehn mehr oder weniger restriktive Regeln eingehalten werden. Mein Kollege Michael\nHerbold und ich machten diesen Workshop gemeinsam und wir stellten schnell fest, dass diese Übung uns dazu\nherausforderte uns aus unserer Komfortzone hinsichtlich Programmiertechniken heraus zu bewegen. 
Zum Beispiel war es im\nersten Moment gar nicht so einfach die Anforderungen: „Alle Primitven Typen müssen in fachlichen Klassen gewrappt\nwerden“, „Maximal zwei Instanzvariablen pro Klasse“ und „Kein public access auf Instanzvariablen (auch keine getter und\nsetter)“ unter einen Hut zu bringen. Wir konnten in den 90 Minuten in jedem Fall viele Anregungen sammeln, die uns\nhoffentlich auch im Alltag helfen werden, wieder gezielt auf Objektorientierung und allgemein guten Codestil zu achten.\nZudem machte uns der Workshop so viel Spass, dass wir uns am Abend nach dem letzten Vortrag nochmal zusammen setzten und\nwährend wir auf unsere Bahn warteten, ein bisschen am Code weiter bastelten.",[18,25766,25767,25768,25773],{},"Der zweite Workshop den ich besuchen wollte,\nwar ",[585,25769,25772],{"href":25770,"rel":25771},"http://www.xpdays.de/2013/sessions/054-hands-on-test-refactoring.html",[589],"Hands-On Test Refactoring"," mit Marco\nEmrich. Hier muss ich leider sagen, dass ich den Workshop bereits nach der Einführung verlassen habe. Dies lag vor allem\ndaran dass mir der Vortragsstil und die Art wie der Workshop geleitet wurde nicht zusagte. Zum anderen waren auch die\nThemen, die behandelt werden sollten, nicht auf dem Level wie ich es mir erwartet hatte. Diese Veranstaltung war für\nmich schließlich die einzige, in der meine Erwartungen nicht erfüllt wurden.",[18,25775,25776,25777,25782],{},"Der letzte Workshop und für mich auch die letzte Veranstaltung der Konferenz\nwar ",[585,25778,25781],{"href":25779,"rel":25780},"http://www.xpdays.de/2013/sessions/093-taking-baby-steps-reloaded.html",[589],"Taking Baby Steps"," mit Marc Philipp, und\nFabian Knittel. In diesem zweistündigen Workshop sollten die Teilnehmer im Pair ein TicTacToe Spiel programmieren, mit\nder Einschränkung, dass jeweils nur zwei Minuten Zeit waren, um einen Test zu schreiben und diesen zu implementieren.\nLeider konnte ich mit meinem Pairing-Partner den erwünschten Effekt dieses Experiments erst gegen Ende des Workshops so\nrichtig nachfühlen, da wir zu Beginn noch stark durch technische Probleme gehemmt waren (fremde IDE, keine externe\nMaus/Tastatur). Schließlich konnten wir uns aber auch der Erkenntnis der anderen Teilnehmer anschließen: im Alltag will\nman oft viel zu viele Probleme auf einmal lösen und diese starke Restriktion auf zwei Minuten pro Commit zeigte jedem\nvon uns, dass man in kleineren Schritten oft zu einer viel besseren und saubereren Lösung kommt.",[18,25784,25785],{},"Zusammenfassend kann ich sagen, dass sich die zwei Konferenztage für mich auf jeden Fall gelohnt haben und ich die XP\nDays nächstes Jahr gerne wieder besuchen möchte. Als konkrete Maßnahmen habe ich mir als Entwickler vorgenommen das\nerlernte Coding-Know-How sowie die Pairprogramming Tipps im Alltag umzusetzen. Als Scrum Master will ich versuchen\nunsere Retrospektiven etwas mehr zu variieren und auf jeden Fall einen synyx internen Code Retreat mit allen Entwicklern\nzu organisieren. Abschließend bleibt mir nur noch ein großes Lob an die Organisatoren der XP Days 2013 zu geben, die für\nden aus meiner Sicht reibungslosen Ablauf der Konferenz gesorgt haben.",{"title":48,"searchDepth":86,"depth":86,"links":25787},[],[613],"2013-11-26T12:56:17","Vom 14. bis 16. November fanden in Karlsruhe die XP Days statt und von synyx\\nwaren (inklusive mir) vier Mitarbeiter dort. 
Da keiner von uns den Open Space am Samstag besuchte, gebe ich hier nur ein\\nkurzes persönliches Fazit zu den ersten beiden Konferenztagen.","https://synyx.de/blog/synyx-bei-den-xp-days-2013/",{},"/blog/synyx-bei-den-xp-days-2013",{"title":25678,"description":25795},"Vom 14. bis 16. November fanden in Karlsruhe die XP Days statt und von synyx\nwaren (inklusive mir) vier Mitarbeiter dort. Da keiner von uns den Open Space am Samstag besuchte, gebe ich hier nur ein\nkurzes persönliches Fazit zu den ersten beiden Konferenztagen.","blog/synyx-bei-den-xp-days-2013",[7906,7721,25798,25799],"xp","xp-days","Vom 14. bis 16. November fanden in Karlsruhe die XP Days statt und von synyx waren (inklusive mir) vier Mitarbeiter dort. Da keiner von uns den Open Space am Samstag…","AlFZbG3xUTUaJ_2El4i16UP-wLKyvViaV81FOYDxiaI",{"id":25803,"title":25804,"author":25805,"body":25807,"category":26473,"date":26474,"description":26475,"extension":617,"link":26476,"meta":26477,"navigation":499,"path":26478,"seo":26479,"slug":25811,"stem":26481,"tags":26482,"teaser":26484,"__hash__":26485},"blog/blog/building-android-projects-with-maven-part-2-releases-with-maven.md","Building Android projects with maven – part 2: Releases with maven",[25806],"knell",{"type":11,"value":25808,"toc":26467},[25809,25812,25826,25830,25833,25836,25865,25868,25877,25930,25939,25948,25952,25955,26009,26012,26081,26085,26088,26091,26094,26162,26165,26366,26369,26383,26386,26405,26408,26411,26415,26418,26427,26430,26433,26442,26445,26450,26453,26461,26464],[14,25810,25804],{"id":25811},"building-android-projects-with-maven-part-2-releases-with-maven",[18,25813,21422,25814,25819,25820,25825],{},[585,25815,25818],{"href":25816,"rel":25817},"http://blog.synyx.de/?p=4593",[589],"my previous post",", I showed you the basic setup for android with maven using\nthe ",[585,25821,25824],{"href":25822,"rel":25823},"http://code.google.com/p/maven-android-plugin/",[589],"android-maven-plugin",". Now I’ll show you how to configure it to\nmake releases with maven, and how to configure the plugins to save you some work.",[2207,25827,25829],{"id":25828},"configuring-the-keystore-data","Configuring the keystore data",[18,25831,25832],{},"If you have used the release archetype like in the previous post, most of the work is already done. 
The necessary\nplugins are configured and only need some additional data, like the data for your release keystore.",[18,25834,25835],{},"Looking into the poms, you’ll find the property keys, you have to provide:",[43,25837,25839],{"className":3792,"code":25838,"language":3794,"meta":48,"style":48},"\n \u003Ckeystore>${sign.keystore}\u003C/keystore>\n \u003Calias>${sign.alias}\u003C/alias>\n \u003Cstorepass>${sign.storepass}\u003C/storepass>\n \u003Ckeypass>${sign.keypass}\u003C/keypass>\n\n",[50,25840,25841,25845,25850,25855,25860],{"__ignoreMap":48},[53,25842,25843],{"class":55,"line":56},[53,25844,500],{"emptyLinePlaceholder":499},[53,25846,25847],{"class":55,"line":86},[53,25848,25849],{}," \u003Ckeystore>${sign.keystore}\u003C/keystore>\n",[53,25851,25852],{"class":55,"line":126},[53,25853,25854],{}," \u003Calias>${sign.alias}\u003C/alias>\n",[53,25856,25857],{"class":55,"line":163},[53,25858,25859],{}," \u003Cstorepass>${sign.storepass}\u003C/storepass>\n",[53,25861,25862],{"class":55,"line":186},[53,25863,25864],{}," \u003Ckeypass>${sign.keypass}\u003C/keypass>\n",[18,25866,25867],{},"Simply configure them into your maven settings.xml in the release profile (which is also configured in the parent pom):",[18,25869,25870],{},[573,25871,25872,25873,8780],{},"(If you don’t have a release key yet, check this\nout ",[585,25874,25875],{"href":25875,"rel":25876},"http://developer.android.com/tools/publishing/app-signing.html#cert",[589],[43,25878,25880],{"className":3792,"code":25879,"language":3794,"meta":48,"style":48},"\n \u003Cprofile>\n \u003Cid>release\u003C/id>\n \u003Cproperties>\n \u003Csign.keystore>/path/to/your/keystore/keystore.keystore\u003C/sign.keystore>\n \u003Csign.alias>your-key-alias\u003C/sign.alias>\n \u003Csign.storepass>your-keystore-password\u003C/sign.storepass>\n \u003Csign.keypass>your-key-password\u003C/sign.keypass>\n \u003C/properties>\n \u003C/profile>\n\n",[50,25881,25882,25886,25891,25896,25901,25906,25911,25916,25921,25926],{"__ignoreMap":48},[53,25883,25884],{"class":55,"line":56},[53,25885,500],{"emptyLinePlaceholder":499},[53,25887,25888],{"class":55,"line":86},[53,25889,25890],{}," \u003Cprofile>\n",[53,25892,25893],{"class":55,"line":126},[53,25894,25895],{}," \u003Cid>release\u003C/id>\n",[53,25897,25898],{"class":55,"line":163},[53,25899,25900],{}," \u003Cproperties>\n",[53,25902,25903],{"class":55,"line":186},[53,25904,25905],{}," \u003Csign.keystore>/path/to/your/keystore/keystore.keystore\u003C/sign.keystore>\n",[53,25907,25908],{"class":55,"line":221},[53,25909,25910],{}," \u003Csign.alias>your-key-alias\u003C/sign.alias>\n",[53,25912,25913],{"class":55,"line":242},[53,25914,25915],{}," \u003Csign.storepass>your-keystore-password\u003C/sign.storepass>\n",[53,25917,25918],{"class":55,"line":273},[53,25919,25920],{}," \u003Csign.keypass>your-key-password\u003C/sign.keypass>\n",[53,25922,25923],{"class":55,"line":279},[53,25924,25925],{}," \u003C/properties>\n",[53,25927,25928],{"class":55,"line":496},[53,25929,10355],{},[18,25931,25932,25933,25938],{},"For our case, we removed everything related to proguard from the\nconfigs, ",[585,25934,25937],{"href":25935,"rel":25936},"http://www.youtube.com/watch?v=4Z2Z23SAFVA",[589],"beacuase"," we don’t use it for this project. 
```xml
<profile>
    <id>release</id>
    <properties>
        <sign.keystore>/path/to/your/keystore/keystore.keystore</sign.keystore>
        <sign.alias>your-key-alias</sign.alias>
        <sign.storepass>your-keystore-password</sign.storepass>
        <sign.keypass>your-key-password</sign.keypass>
    </properties>
</profile>
```

For our case, we removed everything related to proguard from the configs, because we don’t use it for this project. Instead of removing everything, you could also just disable it in the android-maven-plugin configuration:

```xml
<proguard>
    <skip>true</skip>
</proguard>
```

## Adding scm connection and distribution management

Like you would normally for maven releases, you have to add an scm connection to your parent pom, e.g.:

```xml
<scm>
    <developerConnection>scm:git:ssh://asd@some.git.server/my-android-project.git</developerConnection>
    <tag>HEAD</tag>
</scm>
```

If you have a server to deploy your maven artifacts to, now is the time to add it to your config (if you only build and deploy them from your local machine, you don’t need this):

```xml
<distributionManagement>
    <repository>
        <name>releases-repository</name>
        <id>releases.some.address</id>
        <url>https://some.address/content/repositories/releases/</url>
    </repository>
    <snapshotRepository>
        <name>snapshots-repository</name>
        <id>snapshots.some.address</id>
        <url>https://some.address/content/repositories/snapshots/</url>
    </snapshotRepository>
</distributionManagement>
```
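One detail worth remembering: for `mvn deploy` to authenticate against these repositories, your settings.xml also needs matching `<server>` entries whose `<id>` equals the repository id above. A sketch with the example ids from this config (user and password are placeholders):

```xml
<servers>
    <server>
        <id>releases.some.address</id>
        <username>your-deploy-user</username>
        <password>your-deploy-password</password>
    </server>
    <server>
        <id>snapshots.some.address</id>
        <username>your-deploy-user</username>
        <password>your-deploy-password</password>
    </server>
</servers>
```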
## Configure the Android Manifest from Maven

To save us some nasty work, we also want to configure the Android manifest with our maven profiles (e.g. setting Debug to false for release, automatically updating the version on releases, etc.).

For this, we’ll need to filter the manifest in maven.

Add the following plugins to the parent pom:

```xml
<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-resources-plugin</artifactId>
    <version>2.6</version>
    <configuration>
        <encoding>UTF-8</encoding>
    </configuration>
</plugin>
<plugin>
    <groupId>org.codehaus.mojo</groupId>
    <artifactId>build-helper-maven-plugin</artifactId>
    <version>1.8</version>
</plugin>
```

… and the config for them in the app module (and IT module) pom:

```xml
<build>
    <resources>
        <resource>
            <targetPath>${project.basedir}/target/filtered-manifest</targetPath>
            <filtering>true</filtering>
            <directory>${basedir}</directory>
            <includes>
                <include>AndroidManifest.xml</include>
            </includes>
        </resource>
    </resources>
    ...
    <plugins>
        ...
        <plugin>
            <groupId>org.codehaus.mojo</groupId>
            <artifactId>build-helper-maven-plugin</artifactId>
            <executions>
                <execution>
                    <id>parse-version</id>
                    <goals>
                        <goal>parse-version</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
        <plugin>
            <artifactId>maven-resources-plugin</artifactId>
            <executions>
                <execution>
                    <phase>initialize</phase>
                    <goals>
                        <goal>resources</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
        ...
    </plugins>
</build>
```

Don’t forget to update the Android manifest path in the android-maven-plugin to the filtered manifest location!

```xml
<androidManifestFile>${project.build.directory}/filtered-manifest/AndroidManifest.xml</androidManifestFile>
```

In the Android manifest files of the modules, you can now insert the maven property placeholders for the versionCode and versionName:

```xml
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
          android:versionCode="${parsedVersion.majorVersion}${parsedVersion.minorVersion}${parsedVersion.incrementalVersion}"
          android:versionName="${project.version}">
    ...
</manifest>
```

If you now build your project (clean install), the generated Android manifest should include the versionCode “100” and the version “1.0-Snapshot” (if you didn’t change the version in the pom yet).

Other than that, you have plenty of configuration possibilities for your AndroidManifest within the android-maven-plugin config, and with this setup you can easily alter the values for the different build profiles.

## Building the release

To build the release, use

```bash
mvn release:prepare -Prelease
```

release:perform is not necessary here!

After the release is done, run

```bash
mvn release:clean
```

to clean up.

*Note: you might want to let your scm ignore some files (e.g. release.properties and the target folders) so that you don’t have to further clean up your directories after you run the release.*
That’s it, now you are able to build your android app releases with maven!

Here are the sources to check against: [my-android-project sources](https://media.synyx.de/uploads//2013/09/my-android-project.zip)

My next blog will be about Android & Continuous integration with Jenkins, so check our blog from time to time 🙂

---

# Building Android projects with Maven – Part 1: Setup

Building and managing Android projects with maven is not as easy as it could be. So in this blog, I’ll show you how we managed to get it to work nicely.

In this example, we’ll create a parent project with an app module and a separate instrumentation tests module.

## Project setup

The quickest approach to create a new Android project with maven is using a maven archetype from akquinet (see [http://mvnrepository.com/artifact/de.akquinet.android.archetypes](http://mvnrepository.com/artifact/de.akquinet.android.archetypes)).

For this project, we’ll use the android-release archetype, as it creates a skeleton for exactly our case, as we also want to release the app with maven.

*Hint: be sure to use the latest version of the archetype*

```bash
mvn archetype:generate -DarchetypeArtifactId=android-release -DarchetypeGroupId=de.akquinet.android.archetypes -DarchetypeVersion=1.0.9 -DgroupId=com.foo.bar -DartifactId=my-android-project -Dpackage=com.foo.bar.android
```

With this, a parent project named after the artifactId gets created in your current directory. Inside, you’ll find two modules, one for the app and one for the instrumentation tests, and the pom.xml of the parent is also already pre-configured with both of them as modules. In the pom, you’ll find several plugins already configured for you, on top of the android-maven-plugin.

Now check the pom for the android-maven-plugin version and update it to the latest version (3.6.1 at the moment I wrote this blog). It should be at least 3.6.0 – if not, the build will fail, because the aapt tool can’t be found: it moved in a previous android release and the plugin only considers this since version 3.6.0.

To test the setup, simply go into the parent project and run

```bash
mvn clean install
```

Now everything should be compiled and the instrumentation tests should run on any emulator or device that is connected.

If you get an error stating “Platform/API level 16 not available”, install it via the SDK Manager, or change the sdk version in the pom to an available one.

In the android-maven-plugin config:

```xml
<sdk>
    <platform>16</platform>
</sdk>
```

*Note: the archetypes from akquinet also include their androlog logging framework, a wrapper around the Android logging that adds the functionality of disabling the logging for releases and provides log4j-like configuration. You could remove it from the poms, or you could give it a try, which I would recommend 🙂*

## Setting up Android Studio to work with maven

To conveniently work on a maven project with Android Studio, we have to set it to automatically import Maven projects, so that it notices changes to the pom.xml and updates its dependencies.

File -> Settings -> Maven -> Importing -> Import Maven projects automatically

### Import the project:

- File -> Import project
- Select your parent project
- Import from external model -> Select Maven
- Check “Import Maven projects automatically” -> Next
- Select the Android platform -> Next
- Finish

### Configure Android Studio to also build using Maven:

- Select the project
- Run -> Edit Configurations
- “+” Android Application
- Module: select Android App
- Launch default Activity
- Select default target device
- Before Launch: remove “Make”
- Before Launch: add Maven “clean install”
- OK

Now if you run the project in Android Studio, Maven will be used.

## Running Android Lint

It’s recommended to run Android Lint to check warnings and errors. To run it with the android-maven-plugin, simply insert this config into the android-maven-plugin in the pom (and edit the sources path according to your project):

```xml
<lint>
    <skip>false</skip>
    <sources>${project.basedir}/my-android-project/src/main/java/</sources>
    <enableHtml>true</enableHtml>
    <enableXml>false</enableXml>
</lint>
```

If you want to execute android lint on the build, add android:lint to the maven commands (e.g. “mvn clean install android:lint”).
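Spelled out as a full command line, that is:

```bash
mvn clean install android:lint
```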
Don’t forget to edit the Run config you previously created to include “android:lint”!

The results will be written to /target/lint-results/lint-results-html/. (We disabled the XML output and enabled HTML, because it simply has better readability and you get a much better overview on the HTML pages.)

## Create a separate Instrumentation Test Profile

We don’t want to install the app and run the Instrumentation Tests on *every* build, because it just takes too long. So we’ll execute the tests only in a separate maven profile.

First, we’ll disable the instrumentation tests for all cases by inserting a configuration for the android-maven-plugin in the parent pom:

```xml
<configuration>
    <test>
        <skip>true</skip>
    </test>
    ...
</configuration>
```

In the parent pom, add the new profile “IT”:

```xml
<profile>
    <id>IT</id>
</profile>
```

And in the pom of the IT module, we also add the IT profile and enable the tests for it again:

```xml
<profile>
    <id>IT</id>
    <build>
        <plugins>
            <plugin>
                <groupId>com.jayway.maven.plugins.android.generation2</groupId>
                <artifactId>android-maven-plugin</artifactId>
                <inherited>true</inherited>
                <configuration>
                    <test>
                        <skip>false</skip>
                    </test>
                </configuration>
            </plugin>
        </plugins>
    </build>
</profile>
```

To test it, run

```bash
mvn clean install
```

The Instrumentation Tests should not run. And then

```bash
mvn clean install -PIT
```

Now the tests should run.

## Dependencies on android libraries

The dependencies on the android libraries are some kind of problem, because not all of them are published in maven central (e.g. only up to Android 4.1.1.4 right now, and only the support-v4 library, not the -v7 one…)

To compensate for this, there is the [maven-android-sdk-deployer](https://github.com/mosabua/maven-android-sdk-deployer) project that lets you deploy your local sdk components into a maven repository of your choice (defaults to your local one).

For the tutorial on how to use it, please head over to their github page, everything is explained there in nice detail.

Just some small pointers for this setup: if you have a dependency on an apklib (apart from the android framework itself), define the dependency in the parent project to pin the version:

```xml
<dependency>
    <groupId>android.support</groupId>
    <artifactId>compatibility-v4</artifactId>
    <version>18</version>
</dependency>
```

In the app:

```xml
<dependency>
    <groupId>android.support</groupId>
    <artifactId>compatibility-v4</artifactId>
</dependency>
```

And in the IT module, set it as provided!

```xml
<dependency>
    <groupId>android.support</groupId>
    <artifactId>compatibility-v4</artifactId>
    <scope>provided</scope>
</dependency>
```

If you want to use newer versions than the preconfigured 4.1.1.4, you have to deploy them yourself and also use another group id for android, simply “android” instead of “com.google.android”. For the exact versions, deploy them with the maven-android-sdk-deployer and look into your repository, or simply look at the readme on the github page of the deployer.

Here’s an example config of the dependencies deployed with the maven-android-sdk-deployer:

```xml
<properties>
    <platform.version>4.2.2_r2</platform.version>
    <android.platform>17</android.platform>
</properties>
<dependencyManagement>
    <dependencies>
        <dependency>
            <groupId>android</groupId>
            <artifactId>android</artifactId>
            <version>${platform.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>android.test.uiautomator</groupId>
            <artifactId>uiautomator</artifactId>
            <version>${platform.version}</version>
            <scope>provided</scope>
        </dependency>
    </dependencies>
</dependencyManagement>
```

Don’t forget to update the modules’ dependencies accordingly!

As you can probably see, we also replaced the sdk platform version config with a parameter in the android-maven-plugin config:

```xml
<sdk>
    <platform>${android.platform}</platform>
</sdk>
```

With this you should be able to develop Android (more or less comfortably) using maven and Android Studio.

[In the next blog](http://blog.synyx.de/2013/09/building-android-projects-with-maven-part-2-releases-with-maven/), I’ll show you how to configure the plugins to build releases of Android apps!

I’ll also post the sources for the project at the end of the next blogpost, so you can check against them if you get some errors.

---

# yammer – Metrics made easy – Part I

Metrics by yammer provides runtime metrics and statistics for all kinds of apps you can imagine. A lot of stuff is directly usable out of the box, for example measuring request/response cycles of webapps and providing histograms of the measured values. So, let’s try enabling a simple Java application built by maven.

First we add the needed dependency to our pom:

```xml
<dependency>
    <groupId>com.yammer.metrics</groupId>
    <artifactId>metrics-core</artifactId>
    <version>3.0.0-BETA1</version>
</dependency>
```

After providing this, we are able to do something like that in our code:

```java
static final MetricRegistry metrics = new MetricRegistry("Demonstration");
```

The MetricRegistry is not more and not less than a structural component for a couple of Metrics in your application. Let’s imagine you’ve developed an application for remote number crunching, then it would be a good idea to create 2 MetricRegistry instances like this:

```java
static final MetricRegistry crunchMetrics = new MetricRegistry("CrunchMeasurement");
static final MetricRegistry requestMetrics = new MetricRegistry("RequestMeasurement");
```

You would use one of them for all measurements of the crunching component itself and the other for the little server you included to access your numbercruncher (possibly to measure request/response cycles too).

First step is done. We are now able to add some Metrics to a registry, which is needed to expose them. But wait … what should we expose now?

Which possibilities do we have with metrics?

Well, there are 5 types of measurements included:

- Gauge (instantaneous measurement of one value)
- Histogram (measurement of value variants)
- Timer (measurement of timings)
- Counter (measurement of atomic longs)
- Meter (measurement of ticks in a time range)

as well as some typically needed ones for special purposes like the

***com.yammer.metrics.servlet.DefaultWebappMetricsFilter*** (we will discuss this later in the blog)

So for our example we should take two types of measurements: a Timer for measurement of request/response cycles and a second timer for measurement of the number crunching calculation.

The next step is to expose the measured values in a format you can use. Metrics provides a lot of default reporters, like:

- JMX
- JSON
- CSV
- log4j / slf4j
- logback
- ganglia
- graphite

Of course you are able to create your own Reporters ’cause it’s open source software 🙂 For our example it’s enough to use one of the bundled reporters, e.g. the ConsoleReporter.

So, at a glance we need to do the following steps to enable a Java application with Metrics:

1.) Create and instantiate a MetricRegistry (I highly encourage you to inject them into your productive code!)

2.) Create Measurements to your needs (in our example the mentioned 2 timers)

3.) Create and instantiate a Reporter to your needs (I highly encourage you to inject them into your productive code 🙂

Let’s show this with a very straightforwardly coded application:

```java
final MetricRegistry metrics = new MetricRegistry("Demonstration");
evictions = metrics.counter(MetricRegistry.name(HealthCheckDemo.class, "cache-evictions"));
request = metrics.timer(MetricRegistry.name(ArithmeticDemoOperation.class, "calculation-duration"));
reporter = ConsoleReporter.forRegistry(metrics).build();
jmxReporter = JmxReporter.forRegistry(metrics).build();
reporter.start(1, TimeUnit.MINUTES); // should expose values every minute
jmxReporter.start();
```
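One thing this snippet does not show is how a value actually gets into the “calculation-duration” timer. With the 3.0 API that is done via a Timer.Context wrapped around the measured code; the calculate() call below is just a hypothetical stand-in for the demo’s number crunching:

```java
// Measure one calculation with the timer registered above.
// (The classes live in com.yammer.metrics in the 3.0 betas and moved to
// com.codahale.metrics in the final 3.0 release; the Timer.Context API is the same.)
final Timer.Context context = request.time();
try {
    calculate(); // hypothetical stand-in for the actual number crunching
} finally {
    context.stop(); // records the elapsed time in the timer's histogram
}
```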
After running this application you should see a console output like this:

```
05.05.13 08:22:03 ==============================================================
-- Counters --------------------------------------------------------------------
org.synyx.demos.HealthCheckDemo.cache-evictions
count = 1
-- Timers ----------------------------------------------------------------------
org.synyx.demos.ArithmeticDemoOperation.calculation-duration
count = 1
mean rate = 0,02 calls/second
1-minute rate = 0,09 calls/second
5-minute rate = 0,17 calls/second
15-minute rate = 0,19 calls/second
min = 1250,28 milliseconds
max = 1250,28 milliseconds
mean = 1250,28 milliseconds
stddev = 0,00 milliseconds
median = 1250,28 milliseconds
75% <= 1250,28 milliseconds
95% <= 1250,28 milliseconds
98% <= 1250,28 milliseconds
99% <= 1250,28 milliseconds
99.9% <= 1250,28 milliseconds
05.05.13 08:23:03 ==============================================================
-- Counters --------------------------------------------------------------------
org.synyx.demos.HealthCheckDemo.cache-evictions
count = 1
-- Timers ----------------------------------------------------------------------
org.synyx.demos.ArithmeticDemoOperation.calculation-duration
count = 1
mean rate = 0,01 calls/second
1-minute rate = 0,03 calls/second
5-minute rate = 0,14 calls/second
15-minute rate = 0,18 calls/second
min = 1250,28 milliseconds
max = 1250,28 milliseconds
mean = 1250,28 milliseconds
stddev = 0,00 milliseconds
median = 1250,28 milliseconds
75% <= 1250,28 milliseconds
95% <= 1250,28 milliseconds
98% <= 1250,28 milliseconds
99% <= 1250,28 milliseconds
99.9% <= 1250,28 milliseconds
```

Furthermore, if you debug the demo application you are able to inspect the exposed values via a jmx client like jVisualVM or jConsole after connecting.

![Bildschirmfoto 2013-09-02 um 12.08.02](https://media.synyx.de/uploads//2013/06/Bildschirmfoto-2013-09-02-um-12.08.02.png)

SUCCESS!! o/ As you can see, you are able to expose the same values to different reporters if you want to. Isn’t that nice? Yes it is!

Next time we will enable a Java web application with some measurements, so stay tuned!

---

# Usefulness Ranking of Code Metrics

Static code analysis is one of the more controversial fields of software engineering. “Misleading Bogus!” screamers and “Must not work without it!” pleaders are bashing their heads in like survivors of a zombie war. My contribution to this argument is an attempt to evaluate the usefulness of different code analysis figures.

Since I mostly work in Java projects with Sonar as main analysis tool, my ranking is centered on this environment. Some of the mentioned metrics don’t even exist outside of Sonar. Nonetheless much of it should be easily transferable to other programming languages or software design in general.

Of course this ranking is highly subjective, based on my personal experience, and only partly informational, since most developers already know the meaning of all the code metrics. More than that I hope to trigger a discussion about their usefulness and importance.

First things first: What makes a code metric useful?

- It outright shows a violation of or deviation from the defined best practice.
- It hints to a place in your source code that has design flaws.
- It shows that a certain aspect in your project is highly neglected and needs to be worked on.
- You can quantify it by telling a “normal” or “good” value and react on it when this norm is violated.

If any of these is true, a metric can be considered somewhat useful. So let’s dive into it and have a look at the different metrics, starting with the most useful ones:

## 1. Cyclomatic Complexity

The [cyclomatic complexity](http://en.wikipedia.org/wiki/Cyclomatic_complexity) of classes and methods turns out to be my favorite code metric. It almost always hints to flawed design because too much decision logic is crammed into one method or class. The code often is not unit tested properly because complex units are very difficult to test – every different execution path in the code flow should have its own unit test. In general you should strive for low complexity in every global and local scope of your project, which makes the cyclomatic complexity a very important and useful measurement.
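To get a feeling for the number: the metric starts at 1 for a method and adds 1 for every decision point (if, loop, case, &&, ||). A small, purely illustrative example:

```java
// Cyclomatic complexity = 4: 1 (method) + 1 (if) + 1 (for) + 1 (if)
// -> ideally four unit tests, one per independent execution path.
int countNonEmpty(List<String> values) {
    if (values == null) {           // +1
        return 0;
    }
    int count = 0;
    for (String value : values) {   // +1
        if (!value.isEmpty()) {     // +1
            count++;
        }
    }
    return count;
}
```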
You can compare the RC of\ndifferent projects to get a rough idea of their relative code quality.",[18,27749,27750],{},"Drilling down the specific violations sometimes hints to design flaws, although often the interesting violations are not\neasily identifiable in the mass of unimportant ones.",[2207,27752,27754],{"id":27753},"_4-package-tangling","4. Package Tangling",[18,27756,27757],{},"In the first place the package tangle index and similar metrics show you cyclic dependencies, which are always bad. In\naddition it can identify dependency magnets like util packages that are used all over the project, which makes changes\non them quite difficult.",[2207,27759,27761],{"id":27760},"_5-lcom4","5. LCOM4",[18,27763,10847,27764,27770],{},[585,27765,27769],{"href":27766,"rel":27767,"title":27768},"https://web.archive.org/web/20131129081756/http://docs.codehaus.org:80/display/SONAR/LCOM4+-+Lack+of+Cohesion+of+Methods",[589],"LCOM4","Lack of Cohesion of Methods","\ntells you how much the methods inside a class belong together by measuring if they use the same members of the class. An\nLCOM higher than one often leads you to a violation of the Single Responsibility Principle.",[2207,27772,27774],{"id":27773},"_6-lines-of-code","6. Lines of Code",[18,27776,27777],{},"Wait, what? Lines of code is not at the end of the list? Isn’t that just that bogus number, which tells us absolutely\nnothing and encouraged developers in the past to produce crap because they were paid by lines of code?",[18,27779,27780,27781,27783],{},"Well, on the one hand this is true – on the other hand it isn’t ",[573,27782,17782],{}," useless in my opinion. If you have a look at the\namount of lines of code broken down by class or method you will almost always find a badly designed piece of code at the\ntop of the list. Most of the times the largest class in a project is the “black sheep”, which every developer fears\nchanging and where redesign is needed the most. Lines of Code per class helps you identifying it.",[2207,27785,27787],{"id":27786},"_7-sonar-quality-index","7. Sonar Quality Index",[18,27789,27790,27791,27796],{},"Sonar’s ",[585,27792,27795],{"href":27793,"rel":27794,"title":27795},"https://web.archive.org/web/20150118055327/http://docs.codehaus.org:80/display/SONAR/Quality+Index+Plugin",[589],"Quality Index","\ntries to merge several other Indexes into one number to give an overall indicator for code quality. It doesn’t do a very\ngood job though, because the formulas and weightings behind it are not really intuitive, which makes it a pretty\nintransparent and meaningless measurement. You can use it to roughly compare projects with one another but it won’t\nreally help to increase your code quality.",[2207,27798,27800],{"id":27799},"_8-sonar-complexity-factor","8. Sonar Complexity Factor",[18,27802,27803,27804,27807],{},"The Sonar Complexity Factor is so far down the list because it is always zero. Always! You say I am lying and you have\nseen it above zero? Well, then measuring the code quality of your project is one of your lesser problems. The Complexity\nFactor only rises above zero, when you have a cyclomatic complexity of 31 or more somewhere in your code. That means a\nmethod with ",[573,27805,27806],{},"31 or more different execution paths",". That’s the kind of code you don’t want to change anymore, let alone\nfix it. You just wanna release it from its pain and throw it away. 
And a metric that only shows something, when the game\nover screen is already flashing in front of you, doesn’t help at all.",[2207,27809,27811],{"id":27810},"_9-lines-of-comments","9. Lines of Comments",[18,27813,27814,27815,27820],{},"Counting lines of comments to evaluate your code is one of the worst ideas I’ve heard. Sonar for example even tells you\nthat it is good to have more lines of comments – whaaaat? Didn’t we learn in Uncle\nBob’s ",[585,27816,27819],{"href":27817,"rel":27818,"title":27819},"http://www.amazon.com/Clean-Code-Handbook-Software-Craftsmanship/dp/0132350882",[589],"Clean Code"," that\n“comments are not like Schindler’s list, they are not pure good”?",[18,27822,27823],{},"Yes, you should describe your API properly with Javadoc. But you should try to reduce the comments describing your code,\nthe code should describe itself. These opposing goals make it impossible to tell a “good” amount of comments, thus\nmaking this metric completely useless.",[18,27825,27826],{},"OK, that’s it. I hope I could give you a little insight to the usefulness of some code metric numbers or at least got a\n“bullshit, that guy has no clue” out of you to animate you for discussion.",[18,27828,27829],{},"One more thing: In my opinion a newly started greenfield project should try to keep the first five of the list at\noptimum (duplication and dependency cycles at 0, LCOM at 1, complexity near 1, Rules Compliance at 100%), which is not\nimpossible and not only gives you a very good feeling about your code but also saves you a lot of work in the long run.",{"title":48,"searchDepth":86,"depth":86,"links":27831},[27832,27833,27834,27835,27836,27837,27838,27839,27840],{"id":27720,"depth":86,"text":27721},{"id":27733,"depth":86,"text":27734},{"id":27740,"depth":86,"text":27741},{"id":27753,"depth":86,"text":27754},{"id":27760,"depth":86,"text":27761},{"id":27773,"depth":86,"text":27774},{"id":27786,"depth":86,"text":27787},{"id":27799,"depth":86,"text":27800},{"id":27810,"depth":86,"text":27811},[613],"2013-07-01T10:53:44","Static code analysis is one of the more controversial fields of software engineering. “Misleading Bogus!” screamers and\\n“Must not work without it!” pleaders are bashing their heads in like survivors of a zombie war. My contribution to this\\nargument is an attempt to evaluate the usefulness of different code analysis figures.","https://synyx.de/blog/usefulness-ranking-of-code-metrics/",{},"/blog/usefulness-ranking-of-code-metrics",{"title":27677,"description":27686},"blog/usefulness-ranking-of-code-metrics",[27850,21725,27851,27852],"code-metrics","sonar","static-code-analysis","Static code analysis is one of the more controversial fields of software engineering. 
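To make two of these numbers a bit more tangible, here is a small, contrived Java example (the class and all names are invented purely for illustration): `calculateFee` has a cyclomatic complexity of 4 (three decisions plus one), so a thorough test suite needs at least four test cases, and the class as a whole has an LCOM4 of 2, because the fee methods and the report methods operate on completely separate fields.

```java
// Contrived example: LCOM4 = 2, because the two groups of methods below
// never touch the same fields – a hint that the class has two responsibilities.
public class AccountService {

    private double baseFee = 10.0;                             // used only by the fee logic
    private final StringBuilder report = new StringBuilder();  // used only by the report logic

    // Cyclomatic complexity 4: three if decisions plus one.
    public double calculateFee(int items, boolean premium) {
        double fee = baseFee;
        if (items > 100) {
            fee *= 0.9;
        }
        if (premium) {
            fee *= 0.8;
        }
        if (fee < 1.0) {
            fee = 1.0;
        }
        return fee;
    }

    public void appendReportLine(String line) {
        report.append(line).append('\n');
    }

    public String getReport() {
        return report.toString();
    }
}
```

Splitting the class along those two clusters brings LCOM4 back down to 1 and gives each part a single reason to change.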
# Awesome CSS 3 Layouting

At first let me ask you a few questions about developing web applications:

- How do you create multiple column layouts?
- How do you make it flexible?
- How do you solve the 100% height problem?
- How do you make it responsive for desktop vs. mobile?

If one of your answers contained ‘*absolute positioning*’, ‘*floating*’ or ‘*JavaScript*’ you’re invited to read on about my favourite CSS 3 features. Wait, CSS 3? The thing that enables rounded corners and gradients? Yep, exactly, and despite the new trend of flat design CSS 3 is still useful since it has a bit more to offer than rounded corners and gradients.

**Unfortunately this is still bleeding edge and even Firefox (version 21.0 on Ubuntu 13.04) doesn’t render the examples. Update: a few hours ago version 22 of Firefox was released with support for `display: flex`.**

Feel free to add working examples in the comments below 🙂

## display: flex

My favourite feature is the new display property `display: flex`. This solves the first three questions, including the most painful 100% height problem. Remember the ugly JavaScript workarounds to set the height equal to the highest div? Or the abuse of the border attribute and absolutely positioned divs? Well, forget that. All you’re gonna need in the future are a few lines of CSS code.

```css
.container {
  display: -webkit-flex;
  display: flex;
}
.menu {
  overflow: hidden;
  background-color: #D8E47F;
  -webkit-flex: 1;
  flex: 1;
}
.content {
  -webkit-flex: 3;
  flex: 3;
}
.sidenote {
  padding: 1em;
  background-color: #D8E47F;
  -webkit-flex: 2;
  flex: 2;
}
.menu > ul {
  margin: 1em;
  list-style-type: none;
  white-space: nowrap;
}
.menu a {
  color: black;
}
.content article {
  margin: 1em;
}
* {
  padding: 0;
  margin: 0;
}
```
0;\n",[53,28088,28089],{"class":55,"line":2585},[53,28090,282],{},[18,28092,28093,28094,28097,28098,28101,28102,28105,28106,28109,28110,28112,28113,28116],{},"The value of the ",[50,28095,28096],{},"flex"," property tells the browser how much space the section should fill of the available place. In our\nexample the ",[573,28099,28100],{},".main-nav"," is the smallest, followed by ",[573,28103,28104],{},".side-note"," which is twice as big and by ",[573,28107,28108],{},".content"," which is\nthree times as big as the ",[573,28111,28100],{},". The children of ",[573,28114,28115],{},".wrapper"," are flexible (as the name flex tells us), in other\nwords these sections will adjust their width relatively to the parent element. Feel free to play around with the code\npen above! Open it in a new window and change the browser size and see how the other columns adapt their height\nautomatically to the ‘master’ column. Awesome, isn’t it? No more JavaScript calculations or ugly CSS workarounds for\nthis trivial use case.",[18,28118,28119,28120,28123,28124,28127,28128,28130,28131,986],{},"Now what if you want a static width for the ",[573,28121,28122],{},".main-menu",". Well, just remove ",[50,28125,28126],{},"flex: 1"," or replace it by a width\ndeclaration, that’s it. The other columns will take the remaining place that is left, of course relative to it’s set\n",[50,28129,28096],{}," value. If you’re interested to dive deaper into the amazing flexbox layout I recommend the article\non ",[585,28132,28136],{"href":28133,"rel":28134,"title":28135},"http://css-tricks.com/snippets/css/a-guide-to-flexbox/",[589],"css-tricks.com | flexbox","css-tricks.com",[18,28138,28139],{},"Impressed so far? We’re just getting started with CSS 3!",[2207,28141,28143],{"id":28142},"media","@media",[18,28145,28146,28147,28150,28151,28157],{},"Nowadays we developers ",[14675,28148,28149],{},"hopefully"," want to support various devices and resolutions. How often do I curse webpages\nwhile surfing on it with my smartphone. Most smartphones uses Webkit as browser platform, so maybe it’s worth to take a\nlook\nat ",[585,28152,28156],{"href":28153,"rel":28154,"title":28155},"https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Media_queries",[589],"MDN | Media Queries","CSS 3 Media Queries","\neven now.",[18,28159,28160,28161,28163],{},"In the codepen below the ",[50,28162,28143],{}," is at the bottom since it must override the default css values. In this example the\nsidenote will be hidden if the display is too small. 
Furthermore the menu on the left will be positioned at the top when\ndecreasing the display size even more.",[43,28165,28167],{"className":13786,"code":28166,"language":13788,"meta":48,"style":48},".container {\n display: -webkit-flex;\n display: flex;\n}\n.menu {\n overflow: hidden;\n background-color: #D8E47F;\n -webkit-flex: 1;\n flex: 1;\n -webkit-transition: -webkit-flex 1s;\n transition: flex 1s;\n}\n.content {\n -webkit-flex: 3;\n flex: 3;\n}\n.sidenote {\n padding: 1em;\n background-color: #D8E47F;\n -webkit-flex: 2;\n flex: 2;\n}\n.menu > ul {\n margin: 1em;\n list-style-type: none;\n white-space: nowrap;\n}\n.menu a {\n color: black;\n}\n.content article {\n margin: 1em;\n}\n@media (max-width: 800px) {\n .sidenote {\n display: none;\n }\n}\n@media (max-width: 400px) {\n .menu {\n position: fixed;\n top: 0;\n left: 0;\n right: 0;\n }\n .menu > ul {\n display: -webkit-flex;\n display: -moz-box;\n display: flex;\n }\n .menu > ul > li {\n padding: 0 1em;\n -webkit-flex: 1;\n flex: 1;\n }\n .content {\n padding-top: 3em\n }\n}\n* {\n padding: 0;\n margin: 0;\n}\n\n",[50,28168,28169,28173,28177,28181,28185,28189,28193,28197,28201,28205,28210,28215,28219,28223,28227,28231,28235,28239,28243,28247,28251,28255,28259,28263,28267,28271,28275,28279,28283,28287,28291,28295,28299,28303,28308,28313,28318,28322,28326,28331,28336,28341,28346,28351,28356,28360,28365,28370,28375,28380,28384,28389,28394,28399,28404,28408,28413,28418,28422,28426,28430,28434,28438],{"__ignoreMap":48},[53,28170,28171],{"class":55,"line":56},[53,28172,27930],{},[53,28174,28175],{"class":55,"line":86},[53,28176,27935],{},[53,28178,28179],{"class":55,"line":126},[53,28180,27940],{},[53,28182,28183],{"class":55,"line":163},[53,28184,282],{},[53,28186,28187],{"class":55,"line":186},[53,28188,27949],{},[53,28190,28191],{"class":55,"line":221},[53,28192,27954],{},[53,28194,28195],{"class":55,"line":242},[53,28196,27959],{},[53,28198,28199],{"class":55,"line":273},[53,28200,27964],{},[53,28202,28203],{"class":55,"line":279},[53,28204,27969],{},[53,28206,28207],{"class":55,"line":496},[53,28208,28209],{}," -webkit-transition: -webkit-flex 1s;\n",[53,28211,28212],{"class":55,"line":503},[53,28213,28214],{}," transition: flex 
1s;\n",[53,28216,28217],{"class":55,"line":509},[53,28218,282],{},[53,28220,28221],{"class":55,"line":515},[53,28222,27978],{},[53,28224,28225],{"class":55,"line":521},[53,28226,27983],{},[53,28228,28229],{"class":55,"line":527},[53,28230,27988],{},[53,28232,28233],{"class":55,"line":533},[53,28234,282],{},[53,28236,28237],{"class":55,"line":539},[53,28238,27997],{},[53,28240,28241],{"class":55,"line":545},[53,28242,28002],{},[53,28244,28245],{"class":55,"line":2414},[53,28246,27959],{},[53,28248,28249],{"class":55,"line":2426},[53,28250,28011],{},[53,28252,28253],{"class":55,"line":2438},[53,28254,28016],{},[53,28256,28257],{"class":55,"line":2451},[53,28258,282],{},[53,28260,28261],{"class":55,"line":2459},[53,28262,28025],{},[53,28264,28265],{"class":55,"line":2470},[53,28266,28030],{},[53,28268,28269],{"class":55,"line":2476},[53,28270,28035],{},[53,28272,28273],{"class":55,"line":2484},[53,28274,28040],{},[53,28276,28277],{"class":55,"line":2490},[53,28278,282],{},[53,28280,28281],{"class":55,"line":2495},[53,28282,28049],{},[53,28284,28285],{"class":55,"line":2507},[53,28286,28054],{},[53,28288,28289],{"class":55,"line":2528},[53,28290,282],{},[53,28292,28293],{"class":55,"line":2539},[53,28294,28063],{},[53,28296,28297],{"class":55,"line":2551},[53,28298,28030],{},[53,28300,28301],{"class":55,"line":2562},[53,28302,282],{},[53,28304,28305],{"class":55,"line":2573},[53,28306,28307],{},"@media (max-width: 800px) {\n",[53,28309,28310],{"class":55,"line":2585},[53,28311,28312],{}," .sidenote {\n",[53,28314,28315],{"class":55,"line":2593},[53,28316,28317],{}," display: none;\n",[53,28319,28320],{"class":55,"line":2600},[53,28321,7384],{},[53,28323,28324],{"class":55,"line":2605},[53,28325,282],{},[53,28327,28328],{"class":55,"line":2610},[53,28329,28330],{},"@media (max-width: 400px) {\n",[53,28332,28333],{"class":55,"line":2622},[53,28334,28335],{}," .menu {\n",[53,28337,28338],{"class":55,"line":2638},[53,28339,28340],{}," position: fixed;\n",[53,28342,28343],{"class":55,"line":2649},[53,28344,28345],{}," top: 0;\n",[53,28347,28348],{"class":55,"line":2660},[53,28349,28350],{}," left: 0;\n",[53,28352,28353],{"class":55,"line":2672},[53,28354,28355],{}," right: 0;\n",[53,28357,28358],{"class":55,"line":2683},[53,28359,7384],{},[53,28361,28362],{"class":55,"line":2694},[53,28363,28364],{}," .menu > ul {\n",[53,28366,28367],{"class":55,"line":2701},[53,28368,28369],{}," display: -webkit-flex;\n",[53,28371,28372],{"class":55,"line":2708},[53,28373,28374],{}," display: -moz-box;\n",[53,28376,28377],{"class":55,"line":2713},[53,28378,28379],{}," display: flex;\n",[53,28381,28382],{"class":55,"line":2718},[53,28383,7384],{},[53,28385,28386],{"class":55,"line":2730},[53,28387,28388],{}," .menu > ul > li {\n",[53,28390,28391],{"class":55,"line":2755},[53,28392,28393],{}," padding: 0 1em;\n",[53,28395,28396],{"class":55,"line":2767},[53,28397,28398],{}," -webkit-flex: 1;\n",[53,28400,28401],{"class":55,"line":2778},[53,28402,28403],{}," flex: 1;\n",[53,28405,28406],{"class":55,"line":2789},[53,28407,7384],{},[53,28409,28410],{"class":55,"line":2800},[53,28411,28412],{}," .content {\n",[53,28414,28415],{"class":55,"line":2812},[53,28416,28417],{}," padding-top: 
3em\n",[53,28419,28420],{"class":55,"line":2819},[53,28421,7384],{},[53,28423,28424],{"class":55,"line":2832},[53,28425,282],{},[53,28427,28428],{"class":55,"line":2845},[53,28429,28076],{},[53,28431,28432],{"class":55,"line":2856},[53,28433,28081],{},[53,28435,28436],{"class":55,"line":2861},[53,28437,28086],{},[53,28439,28440],{"class":55,"line":2868},[53,28441,282],{},[18,28443,28444,28445,28447],{},"A more complex use case could be the alignment of the navigation dependent of the screen size. With ",[50,28446,28143],{}," we can\nsimply place it on the top if the screen is small like on a smartphone or place it on the right if the page is visited\nby a desktop browser or even by a mobile one in landscape mode. And all magic is done with CSS only! Again, feel free to\nplay around with the given codepen.",[18,28449,28450,28451,28453,28454,986],{},"There are a lot more ",[50,28452,28143],{}," properties and possibilities that can be best read\non ",[585,28455,28136],{"href":28456,"rel":28457,"title":28458},"http://css-tricks.com/css-media-queries/",[589],"css-tricks | Media Queries",[2207,28460,28461],{"id":28461},"transitions",[18,28463,28464],{},"Last but not least let me introduce css transitions if you never heard of it so far. As well as you won’t need\nJavaScript for 100% height calculations anymore you won’t need it for simple animations.",[18,28466,28467],{},"Let’s imagine that we have too much content and want to use every pixel the display gives us, but we cannot hide the\nmenu on the left because it’s too important!!1!. Why don’t we let the user decide what is important or not? And with\nsome extra animation he will love our application even more!",[18,28469,28470,28471,28474,28475,28478],{},"Due to simple adding or removing the class ",[573,28472,28473],{},".hidden"," we can change the width of the menu container. The ",[50,28476,28477],{},"transition","\nproperty takes all the magic for us and animates the width change. Try to increase the duration and click ‘hide’ and\n‘show’ in the codepen before the animation time is over. Note how the animation stops immediately and returns to the\nprevious state as soon as you click again. Did you ever implemented something like this with JavaScript? Luckily I\ndidn’t.",[18,28480,28481,28482,28487],{},"As well as the above mentioned CSS 3 features, transitions are much more powerful\nand ",[585,28483,28136],{"href":28484,"rel":28485,"title":28486},"http://css-tricks.com/search-results/?q=transition",[589],"css-tricks.com | transition"," is a nice source\nto learn more and to play with advanced examples.",[18,28489,28490],{},"Thanks for reading! And hopefully we can enjoy the full power of CSS 3 as soon as possible throughout all modern\nbrowsers (even IE…).",[607,28492,989],{},{"title":48,"searchDepth":86,"depth":86,"links":28494},[28495,28496,28497],{"id":27914,"depth":86,"text":27906},{"id":28142,"depth":86,"text":28143},{"id":28461,"depth":86,"text":28461},[613],"2013-06-26T11:51:48","https://synyx.de/blog/awesome-css-3-layouting/",{},"/blog/awesome-css-3-layouting",{"title":27857,"description":27866},"blog/awesome-css-3-layouting",[28506],"css3","At first let me ask you a few questions about developing web applications: How do you create multiple column layouts? How do you make it flexible? 
# Asynchronous concurrency with vert.x – Part 2

**CoffeeScript**

Vert.x supports JavaScript through the [Rhino JavaScript engine](https://developer.mozilla.org/en/docs/Rhino). Although JavaScript is a decent language once you get to know it, I prefer [CoffeeScript](http://www.coffeescript.org), a language that compiles to JavaScript. Luckily, vert.x has built-in support for CoffeeScript, so I can use it nearly transparently. You will only notice the JavaScript under the hood when reading stack traces, which will refer to the compiled JavaScript file. For the examples in this blog post, the only thing you need to know is a little CoffeeScript:

```coffeescript
foo = (a, b) -> a + b
```

translates to the JavaScript code

```javascript
var foo = function (a, b) {
  return a + b; // (the last statement is returned)
}
```

Also, parentheses around function arguments are optional:

```coffeescript
foo a, b, c
# same as
foo(a, b, c)
```

The translated source code from the example described in the last post is:

```coffeescript
vertx = require 'vertx'
address = 'example.address'

handler = (message, replier) ->
  stdout.println "sender sent " + message
  replier "pong 1", (message, replier) ->
    # and so on

vertx.eventBus.registerHandler address, handler

vertx.eventBus.send address, "ping 1", (message, replier) ->
  stdout.println "handler sent " + message
  replier "ping 2", (message, replier) ->
    # and so on
```

The shorter function declaration notation is a huge improvement, especially for the kind of callback-heavy code that is prevalent when dealing with asynchronous concurrency.

**The Sleeping Barber Problem**

To challenge vert.x with something more exciting than ping-pong, I decided to model a basic concurrency problem that mirrors some of the challenges that our new application will face – the famous [Sleeping Barber Problem](http://en.wikipedia.org/wiki/Sleeping_barber_problem).

The analogy is based upon a hypothetical barber shop with one barber. The barber has one barber chair and a waiting room with a number of chairs in it. When the barber finishes cutting a customer’s hair, he dismisses the customer and then goes to the waiting room to see if there are other customers waiting. If there are, he brings one of them back to the chair and cuts his hair. If there are no other customers waiting, he returns to his chair and sleeps in it.

Each customer, when he arrives, looks to see what the barber is doing. If the barber is sleeping, then the customer wakes him up and sits in the chair. If the barber is cutting hair, then the customer goes to the waiting room. If there is a free chair in the waiting room, the customer sits in it and waits his turn. If there is no free chair, then the customer leaves. Based on a naïve analysis, the above description should ensure that the shop functions correctly, with the barber cutting the hair of anyone who arrives until there are no more customers, and then sleeping until the next customer arrives. In practice, there are a number of problems that can occur that are illustrative of general scheduling problems.

I’ve [previously solved this problem](https://github.com/OttoAllmendinger/term-paper-stm) using [Software Transactional Memory](http://en.wikipedia.org/wiki/Software_transactional_memory) and was interested in how the message-passing style of vert.x compares.

**Barber.coffee**

The barber shop problem nicely separates into two systems: a `barber` message handler that keeps track of incoming customers and manages the queue, and a set of callback methods representing the customer, which initiate a communication sequence with the message handler. The following code defines the barber message handler.

```coffeescript
vertx = require 'vertx'
addr = 'barber'
waitTime = -> Math.random() * 100

barber = ->
  # the state of the message handler lives
  # in this closure
  busy = false
  queue = []
  freeSeats = 20

  # make the system a little indeterministic
  log = (message) ->
    stdout.println "barber: #{message}"

  # the following methods define the core behavior
  checkQueue = ->
    if queue.length > 0
      serveCustomer queue.shift()
      freeSeats += 1
      return true
    else
      return false

  serveCustomer = ({customer, replier}) ->
    log "serving #{customer}"
    busy = true
    replier 'serve', (message, replier) ->
      vertx.setTimer waitTime(), ->
        log "done serving #{customer}"
        busy = checkQueue()
        replier 'done'

  # this is the handler's callback method that
  # is being returned by the barber function
  (message, replier) ->
    customer = message
    if busy
      # there is an intermediate state where we know that we
      # have to queue the customer because there aren't any
      # free seats, but the customer must first acknowledge
      # the waiting state before we can actually put him in
      # the queue.
      if freeSeats > 0
        freeSeats -= 1
        log "sending #{customer} to queue"
        replier 'busy', (message, replier) ->
          # customer waiting ack
          queue.push {customer, replier}
          log "queued #{customer} - " +
            "length: #{queue.length} - free seats: #{freeSeats}"
      else
        replier 'full'
    else
      serveCustomer {customer, replier}

exports.start = ->
  vertx.eventBus.registerHandler addr, barber()
```
false\n",[53,28747,28748],{"class":55,"line":279},[53,28749,28750],{}," queue = []\n",[53,28752,28753],{"class":55,"line":496},[53,28754,28755],{}," freeSeats = 20\n",[53,28757,28758],{"class":55,"line":503},[53,28759,28760],{}," # make the system a little indeterministic\n",[53,28762,28763],{"class":55,"line":509},[53,28764,28765],{}," log = (message) ->\n",[53,28767,28768],{"class":55,"line":515},[53,28769,28770],{}," stdout.println \"barber: #{message}\"\n",[53,28772,28773],{"class":55,"line":521},[53,28774,28775],{}," # the following methods define the core behavior\n",[53,28777,28778],{"class":55,"line":527},[53,28779,28780],{}," checkQueue = ->\n",[53,28782,28783],{"class":55,"line":533},[53,28784,28785],{}," if queue.length > 0\n",[53,28787,28788],{"class":55,"line":539},[53,28789,28790],{}," serveCustomer queue.shift()\n",[53,28792,28793],{"class":55,"line":545},[53,28794,28795],{}," freeSeats += 1\n",[53,28797,28798],{"class":55,"line":2414},[53,28799,28800],{}," return true\n",[53,28802,28803],{"class":55,"line":2426},[53,28804,28805],{}," else\n",[53,28807,28808],{"class":55,"line":2438},[53,28809,24425],{},[53,28811,28812],{"class":55,"line":2451},[53,28813,28814],{}," serveCustomer = ({customer, replier}) ->\n",[53,28816,28817],{"class":55,"line":2459},[53,28818,28819],{}," log \"serving #{customer}\"\n",[53,28821,28822],{"class":55,"line":2470},[53,28823,28824],{}," busy = true\n",[53,28826,28827],{"class":55,"line":2476},[53,28828,28829],{}," replier 'serve', (message, replier) ->\n",[53,28831,28832],{"class":55,"line":2484},[53,28833,28834],{}," vertx.setTimer waitTime(), ->\n",[53,28836,28837],{"class":55,"line":2490},[53,28838,28839],{}," log \"done serving #{customer}\"\n",[53,28841,28842],{"class":55,"line":2495},[53,28843,28844],{}," busy = checkQueue()\n",[53,28846,28847],{"class":55,"line":2507},[53,28848,28849],{}," replier 'done'\n",[53,28851,28852],{"class":55,"line":2528},[53,28853,28854],{}," # this is the handler's callback method that\n",[53,28856,28857],{"class":55,"line":2539},[53,28858,28859],{}," # is being returned by the barber function\n",[53,28861,28862],{"class":55,"line":2551},[53,28863,28864],{}," (message, replier) ->\n",[53,28866,28867],{"class":55,"line":2562},[53,28868,28869],{}," customer = message\n",[53,28871,28872],{"class":55,"line":2573},[53,28873,28874],{}," if busy\n",[53,28876,28877],{"class":55,"line":2585},[53,28878,28879],{}," # there is an intermediate state where we know that we\n",[53,28881,28882],{"class":55,"line":2593},[53,28883,28884],{}," # have to queue the customer because there aren't any\n",[53,28886,28887],{"class":55,"line":2600},[53,28888,28889],{}," # free seats, but the customer must first acknowledge\n",[53,28891,28892],{"class":55,"line":2605},[53,28893,28894],{}," # the waiting state before we can actually put him in\n",[53,28896,28897],{"class":55,"line":2610},[53,28898,28899],{}," # the queue.\n",[53,28901,28902],{"class":55,"line":2622},[53,28903,28904],{}," if freeSeats > 0\n",[53,28906,28907],{"class":55,"line":2638},[53,28908,28909],{}," freeSeats -= 1\n",[53,28911,28912],{"class":55,"line":2649},[53,28913,28914],{}," log \"sending #{customer} to queue\"\n",[53,28916,28917],{"class":55,"line":2660},[53,28918,28919],{}," replier 'busy', (message, replier) ->\n",[53,28921,28922],{"class":55,"line":2672},[53,28923,28924],{}," # customer waiting ack\n",[53,28926,28927],{"class":55,"line":2683},[53,28928,28929],{}," queue.push {customer, replier}\n",[53,28931,28932],{"class":55,"line":2694},[53,28933,28934],{}," 
log \"queued #{customer} - \" +\n",[53,28936,28937],{"class":55,"line":2701},[53,28938,28939],{}," \"length: #{queue.length} - free seats: #{freeSeats}\"\n",[53,28941,28942],{"class":55,"line":2708},[53,28943,28944],{}," else\n",[53,28946,28947],{"class":55,"line":2713},[53,28948,28949],{}," replier 'full'\n",[53,28951,28952],{"class":55,"line":2718},[53,28953,28805],{},[53,28955,28956],{"class":55,"line":2730},[53,28957,28958],{}," serveCustomer {customer, replier}\n",[53,28960,28961],{"class":55,"line":2755},[53,28962,28963],{},"exports.start = ->\n",[53,28965,28966],{"class":55,"line":2767},[53,28967,28968],{}," vertx.eventBus.registerHandler addr, barber()\n",[18,28970,28971],{},"The state of the barber is encoded by the callback method that will be called for an upcoming event and the values of\nthe variables defined in the closure. By being able to store repliers you can easily trigger remote state changes\natomically, when they should occur.",[18,28973,28974],{},"Customer.coffee\nLet’s define the behavior of the customer in a separate file",[43,28976,28978],{"className":13786,"code":28977,"language":13788,"meta":48,"style":48},"\nvertx = require 'vertx'\naddr = 'barber'\nwaitTime = -> Math.random() * 100\nsendCustomer = (i) ->\n # As with the barber, the customer's state is\n # defined in this closure. The variables will\n # be modified by the callback methods that are\n # triggered by the message handler's replies.\n waiting = false\n beingServed = false\n log = (message) ->\n stdout.println \"customer #{i}: #{message}\"\n # just a shorthand\n send = (message, callback) ->\n vertx.eventBus.send addr, message, callback\n # factor out the exit method:\n # a customer can exit after having been served\n # or when the queue is full\n exit = (message) ->\n log message + \" - exiting\"\n # this method doesn't send a response\n # via the replier\n getHaircut = (message, replier) ->\n waiting = false\n beingServed = true\n log \"being served\"\n replier 'being-served', exit\n log \"enters\"\n send \"customer #{i}\", (message, replier) ->\n switch message\n when 'busy'\n waiting = true\n log 'waiting'\n replier 'waiting', getHaircut\n when 'serve'\n getHaircut message, replier\n when 'full'\n exit message\n# a loop that continuously sends customers\n# to the barber\nsendCustomerLoop = (i) ->\n sendCustomer i\n vertx.setTimer waitTime(), -> sendCustomerLoop i + 1\nexports.start = ->\n sendCustomerLoop 1\n\n",[50,28979,28980,28984,28988,28992,28996,29001,29006,29011,29016,29021,29026,29031,29035,29040,29045,29050,29055,29060,29065,29070,29075,29080,29085,29090,29095,29100,29105,29110,29115,29120,29125,29130,29135,29140,29145,29150,29155,29160,29165,29170,29175,29180,29185,29190,29195,29199],{"__ignoreMap":48},[53,28981,28982],{"class":55,"line":56},[53,28983,500],{"emptyLinePlaceholder":499},[53,28985,28986],{"class":55,"line":86},[53,28987,28617],{},[53,28989,28990],{"class":55,"line":126},[53,28991,28720],{},[53,28993,28994],{"class":55,"line":163},[53,28995,28725],{},[53,28997,28998],{"class":55,"line":186},[53,28999,29000],{},"sendCustomer = (i) ->\n",[53,29002,29003],{"class":55,"line":221},[53,29004,29005],{}," # As with the barber, the customer's state is\n",[53,29007,29008],{"class":55,"line":242},[53,29009,29010],{}," # defined in this closure. 
The variables will\n",[53,29012,29013],{"class":55,"line":273},[53,29014,29015],{}," # be modified by the callback methods that are\n",[53,29017,29018],{"class":55,"line":279},[53,29019,29020],{}," # triggered by the message handler's replies.\n",[53,29022,29023],{"class":55,"line":496},[53,29024,29025],{}," waiting = false\n",[53,29027,29028],{"class":55,"line":503},[53,29029,29030],{}," beingServed = false\n",[53,29032,29033],{"class":55,"line":509},[53,29034,28765],{},[53,29036,29037],{"class":55,"line":515},[53,29038,29039],{}," stdout.println \"customer #{i}: #{message}\"\n",[53,29041,29042],{"class":55,"line":521},[53,29043,29044],{}," # just a shorthand\n",[53,29046,29047],{"class":55,"line":527},[53,29048,29049],{}," send = (message, callback) ->\n",[53,29051,29052],{"class":55,"line":533},[53,29053,29054],{}," vertx.eventBus.send addr, message, callback\n",[53,29056,29057],{"class":55,"line":539},[53,29058,29059],{}," # factor out the exit method:\n",[53,29061,29062],{"class":55,"line":545},[53,29063,29064],{}," # a customer can exit after having been served\n",[53,29066,29067],{"class":55,"line":2414},[53,29068,29069],{}," # or when the queue is full\n",[53,29071,29072],{"class":55,"line":2426},[53,29073,29074],{}," exit = (message) ->\n",[53,29076,29077],{"class":55,"line":2438},[53,29078,29079],{}," log message + \" - exiting\"\n",[53,29081,29082],{"class":55,"line":2451},[53,29083,29084],{}," # this method doesn't send a response\n",[53,29086,29087],{"class":55,"line":2459},[53,29088,29089],{}," # via the replier\n",[53,29091,29092],{"class":55,"line":2470},[53,29093,29094],{}," getHaircut = (message, replier) ->\n",[53,29096,29097],{"class":55,"line":2476},[53,29098,29099],{}," waiting = false\n",[53,29101,29102],{"class":55,"line":2484},[53,29103,29104],{}," beingServed = true\n",[53,29106,29107],{"class":55,"line":2490},[53,29108,29109],{}," log \"being served\"\n",[53,29111,29112],{"class":55,"line":2495},[53,29113,29114],{}," replier 'being-served', exit\n",[53,29116,29117],{"class":55,"line":2507},[53,29118,29119],{}," log \"enters\"\n",[53,29121,29122],{"class":55,"line":2528},[53,29123,29124],{}," send \"customer #{i}\", (message, replier) ->\n",[53,29126,29127],{"class":55,"line":2539},[53,29128,29129],{}," switch message\n",[53,29131,29132],{"class":55,"line":2551},[53,29133,29134],{}," when 'busy'\n",[53,29136,29137],{"class":55,"line":2562},[53,29138,29139],{}," waiting = true\n",[53,29141,29142],{"class":55,"line":2573},[53,29143,29144],{}," log 'waiting'\n",[53,29146,29147],{"class":55,"line":2585},[53,29148,29149],{}," replier 'waiting', getHaircut\n",[53,29151,29152],{"class":55,"line":2593},[53,29153,29154],{}," when 'serve'\n",[53,29156,29157],{"class":55,"line":2600},[53,29158,29159],{}," getHaircut message, replier\n",[53,29161,29162],{"class":55,"line":2605},[53,29163,29164],{}," when 'full'\n",[53,29166,29167],{"class":55,"line":2610},[53,29168,29169],{}," exit message\n",[53,29171,29172],{"class":55,"line":2622},[53,29173,29174],{},"# a loop that continuously sends customers\n",[53,29176,29177],{"class":55,"line":2638},[53,29178,29179],{},"# to the barber\n",[53,29181,29182],{"class":55,"line":2649},[53,29183,29184],{},"sendCustomerLoop = (i) ->\n",[53,29186,29187],{"class":55,"line":2660},[53,29188,29189],{}," sendCustomer i\n",[53,29191,29192],{"class":55,"line":2672},[53,29193,29194],{}," vertx.setTimer waitTime(), -> sendCustomerLoop i + 
1\n",[53,29196,29197],{"class":55,"line":2683},[53,29198,28963],{},[53,29200,29201],{"class":55,"line":2694},[53,29202,29203],{}," sendCustomerLoop 1\n",[18,29205,29206],{},"barbershop.coffee\nThis time, we want to run both handler and sender in the same process, for easier testing.",[43,29208,29210],{"className":13786,"code":29209,"language":13788,"meta":48,"style":48},"\nbarber = require 'barber'\ncustomer = require 'customer'\nbarber.start()\ncustomer.start()\n\n",[50,29211,29212,29216,29221,29226,29231],{"__ignoreMap":48},[53,29213,29214],{"class":55,"line":56},[53,29215,500],{"emptyLinePlaceholder":499},[53,29217,29218],{"class":55,"line":86},[53,29219,29220],{},"barber = require 'barber'\n",[53,29222,29223],{"class":55,"line":126},[53,29224,29225],{},"customer = require 'customer'\n",[53,29227,29228],{"class":55,"line":163},[53,29229,29230],{},"barber.start()\n",[53,29232,29233],{"class":55,"line":186},[53,29234,29235],{},"customer.start()\n",[18,29237,29238,29239,29242],{},"Running the shop\nWhen we start the ",[50,29240,29241],{},"barbershop.coffee"," script, we can see in the log that the shop is running as it is supposed to:",[43,29244,29246],{"className":13786,"code":29245,"language":13788,"meta":48,"style":48},"\ncustomer 1: enters\nbarber: serving customer 1\ncustomer 1: being served\nbarber: done serving customer 1\ncustomer 1: done - exiting\ncustomer 2: enters\nbarber: serving customer 2\ncustomer 2: being served\nbarber: done serving customer 2\ncustomer 2: done - exiting\ncustomer 3: enters\n[...]\n\n",[50,29247,29248,29252,29257,29262,29267,29272,29277,29282,29287,29292,29297,29302,29307],{"__ignoreMap":48},[53,29249,29250],{"class":55,"line":56},[53,29251,500],{"emptyLinePlaceholder":499},[53,29253,29254],{"class":55,"line":86},[53,29255,29256],{},"customer 1: enters\n",[53,29258,29259],{"class":55,"line":126},[53,29260,29261],{},"barber: serving customer 1\n",[53,29263,29264],{"class":55,"line":163},[53,29265,29266],{},"customer 1: being served\n",[53,29268,29269],{"class":55,"line":186},[53,29270,29271],{},"barber: done serving customer 1\n",[53,29273,29274],{"class":55,"line":221},[53,29275,29276],{},"customer 1: done - exiting\n",[53,29278,29279],{"class":55,"line":242},[53,29280,29281],{},"customer 2: enters\n",[53,29283,29284],{"class":55,"line":273},[53,29285,29286],{},"barber: serving customer 2\n",[53,29288,29289],{"class":55,"line":279},[53,29290,29291],{},"customer 2: being served\n",[53,29293,29294],{"class":55,"line":496},[53,29295,29296],{},"barber: done serving customer 2\n",[53,29298,29299],{"class":55,"line":503},[53,29300,29301],{},"customer 2: done - exiting\n",[53,29303,29304],{"class":55,"line":509},[53,29305,29306],{},"customer 3: enters\n",[53,29308,29309],{"class":55,"line":515},[53,29310,29311],{},"[...]\n",[18,29313,29314],{},"This is what the output looks like when there is no congestion at all. By chance, these customers came in just after the\nprevious customer was served. 
If we wait a little longer, we can see a customer entering while the barber is busy:",[43,29316,29318],{"className":13786,"code":29317,"language":13788,"meta":48,"style":48},"\nbarber: serving customer 3\ncustomer 3: being served\ncustomer 4: enters\nbarber: sending customer 4 to queue\ncustomer 4: waiting\nbarber: queued customer 4 - length: 1 - free seats: 19\ncustomer 5: enters\nbarber: sending customer 5 to queue\ncustomer 5: waiting\nbarber: queued customer 5 - length: 2 - free seats: 18\nbarber: done serving customer 3\nbarber: serving customer 4\ncustomer 3: done - exiting\ncustomer 4: being served\n\n",[50,29319,29320,29324,29329,29334,29339,29344,29349,29354,29359,29364,29369,29374,29379,29384,29389],{"__ignoreMap":48},[53,29321,29322],{"class":55,"line":56},[53,29323,500],{"emptyLinePlaceholder":499},[53,29325,29326],{"class":55,"line":86},[53,29327,29328],{},"barber: serving customer 3\n",[53,29330,29331],{"class":55,"line":126},[53,29332,29333],{},"customer 3: being served\n",[53,29335,29336],{"class":55,"line":163},[53,29337,29338],{},"customer 4: enters\n",[53,29340,29341],{"class":55,"line":186},[53,29342,29343],{},"barber: sending customer 4 to queue\n",[53,29345,29346],{"class":55,"line":221},[53,29347,29348],{},"customer 4: waiting\n",[53,29350,29351],{"class":55,"line":242},[53,29352,29353],{},"barber: queued customer 4 - length: 1 - free seats: 19\n",[53,29355,29356],{"class":55,"line":273},[53,29357,29358],{},"customer 5: enters\n",[53,29360,29361],{"class":55,"line":279},[53,29362,29363],{},"barber: sending customer 5 to queue\n",[53,29365,29366],{"class":55,"line":496},[53,29367,29368],{},"customer 5: waiting\n",[53,29370,29371],{"class":55,"line":503},[53,29372,29373],{},"barber: queued customer 5 - length: 2 - free seats: 18\n",[53,29375,29376],{"class":55,"line":509},[53,29377,29378],{},"barber: done serving customer 3\n",[53,29380,29381],{"class":55,"line":515},[53,29382,29383],{},"barber: serving customer 4\n",[53,29385,29386],{"class":55,"line":521},[53,29387,29388],{},"customer 3: done - exiting\n",[53,29390,29391],{"class":55,"line":527},[53,29392,29393],{},"customer 4: being served\n",[18,29395,29396,29397,29400,29401,29404],{},"As you can see, customer 4 was added to the queue and is being served right customer 3 is done. But what happens if the\nqueue is full? 
Let’s set ",[50,29398,29399],{},"waitTime = -> Math.random() * 80"," in ",[50,29402,29403],{},"customer.coffee"," so that there are a few more customers\nentering than leaving.",[43,29406,29408],{"className":13786,"code":29407,"language":13788,"meta":48,"style":48},"\ncustomer 34: enters\nbarber: sending customer 34 to queue\ncustomer 34: waiting\nbarber: queued customer 34 - length: 20 - free seats: 0\ncustomer 35: enters\ncustomer 35: full - exiting\n\n",[50,29409,29410,29414,29419,29424,29429,29434,29439],{"__ignoreMap":48},[53,29411,29412],{"class":55,"line":56},[53,29413,500],{"emptyLinePlaceholder":499},[53,29415,29416],{"class":55,"line":86},[53,29417,29418],{},"customer 34: enters\n",[53,29420,29421],{"class":55,"line":126},[53,29422,29423],{},"barber: sending customer 34 to queue\n",[53,29425,29426],{"class":55,"line":163},[53,29427,29428],{},"customer 34: waiting\n",[53,29430,29431],{"class":55,"line":186},[53,29432,29433],{},"barber: queued customer 34 - length: 20 - free seats: 0\n",[53,29435,29436],{"class":55,"line":221},[53,29437,29438],{},"customer 35: enters\n",[53,29440,29441],{"class":55,"line":242},[53,29442,29443],{},"customer 35: full - exiting\n",[18,29445,29446],{},"New customers are being turned away, as expected. The important thing is that there is no deadlocks and no invalid\nstates, which can be easily checked by reading the log output. Knowing that there is just one callback method being\nexecuted at any point in time is a great help when reasoning about the program.",[18,29448,29449,29450,10671,29453,29456,29457,29460,29461,29464],{},"Conclusion\nThe central primitive is the construct ",[50,29451,29452],{},"replier(send_message, next_state)",[50,29454,29455],{},"replier"," triggers a state transition in\nthe remote system through ",[50,29458,29459],{},"send_message"," and defines the local ",[50,29462,29463],{},"next_state",".\nIf you can model your system as something similar to linked state machines, this concurrency approach is easy to\nimplement and very powerful.",[16627,29466],{},[607,29468,989],{},{"title":48,"searchDepth":86,"depth":86,"links":29470},[],[613,996],"2013-04-24T12:34:33","CoffeeScript\\nVert.x supports JavaScript through the Rhino JavaScript engine. Although\\nJavaScript is a decent language once you get to know it, I prefer CoffeeScript, a\\nlanguage that compiles to JavaScript. Luckily, vert.x has built-in support for CoffeeScript, so I can use it nearly\\ntransparently. You will only notice the JavaScript under the hood when reading stack traces, which will refer to the\\ncompiled JavaScript file.\\nFor the examples in this blog post, the only thing you need to know a little CoffeeScript:","https://synyx.de/blog/asynchronous-concurrency-with-vert-x-part-2/",{},"/blog/asynchronous-concurrency-with-vert-x-part-2",{"title":28511,"description":29478},"CoffeeScript\nVert.x supports JavaScript through the Rhino JavaScript engine. Although\nJavaScript is a decent language once you get to know it, I prefer CoffeeScript, a\nlanguage that compiles to JavaScript. Luckily, vert.x has built-in support for CoffeeScript, so I can use it nearly\ntransparently. 
You will only notice the JavaScript under the hood when reading stack traces, which will refer to the\ncompiled JavaScript file.\nFor the examples in this blog post, the only thing you need to know a little CoffeeScript:","asynchronous-concurrency-with-vert-x-part-2","blog/asynchronous-concurrency-with-vert-x-part-2",[29482,16601,7265,23481],"coffeescript","CoffeeScript Vert.x supports JavaScript through the Rhino JavaScript engine. Although JavaScript is a decent language once you get to know it, I prefer CoffeeScript, a language that compiles to JavaScript.…","jAhQ7HOciuqYnRkEMPmBHkaC1DBrMMrda632jXz81ds",{"id":29486,"title":29487,"author":29488,"body":29489,"category":29667,"date":29668,"description":29669,"extension":617,"link":29670,"meta":29671,"navigation":499,"path":29672,"seo":29673,"slug":29499,"stem":29674,"tags":29675,"teaser":29678,"__hash__":29679},"blog/blog/what-is-an-acceptance-test.md","Acceptance testing at synyx – Part 5",[12981],{"type":11,"value":29490,"toc":29664},[29491,29494,29497,29501,29530,29533,29536,29607,29614,29621,29632,29656,29659,29662],[14,29492,29487],{"id":29493},"acceptance-testing-at-synyx-part-5",[18,29495,29496],{},"The last few blogs about acceptance-testing focused on setting up a nice and scalable infrastructure to do testing\nthrough the (web)-GUI using a Selenium grid. Since we’ve got this running now we can go on to topics that focus how we\nwrite these tests. At synyx we try to write our web-tests as “acceptance-tests” so we first take a small dive into\nwhat that is.",[2207,29498,29500],{"id":29499},"what-is-an-acceptance-test","What is an Acceptance Test?",[18,29502,29503,29504,29509,29510,29515,29516,29519,29520,29523,29524,29529],{},"In the first place an acceptance test cares about what is tested, not so much about how this is done. Consider that you\nspecify features for an application, hence you write ",[585,29505,29508],{"href":29506,"rel":29507},"http://en.wikipedia.org/wiki/User_story",[589],"user stories"," for them.\nThen you will soon get to the question, when the work at a story is completed. A good approach to define when you are\nreally done with a story is to define some ",[585,29511,29514],{"href":29512,"rel":29513},"http://scrummethodology.com/scrum-acceptance-criteria/",[589],"acceptance criteria","\nfor it. Then you check if the application meets these criteria to determine if the story you are working on is done. So\nif a story has the title ",[573,29517,29518],{},"“A shop-item can be added to the shopping cart”"," you probably can define acceptance criterias\npretty easy. One of them could be ",[573,29521,29522],{},"“each item in the shop that is currently available for ordering can be added to the\nusers shopping cart. If the user views his cart afterwards the item is listed there”",". Traditionally these criteria\nmight get tested by the developers themself and later some variation\nof ",[585,29525,29528],{"href":29526,"rel":29527},"https://web.archive.org/web/20170331070234if_/http://cdn.memegenerator.net/instances/400x/24216149.jpg",[589],"QA",". But\nit’s preferable if these criteria are tested automatically somewhere in the build pipeline.",[18,29531,29532],{},"Automating acceptance-testing has big advantages over manual testing: Noone has to do the same thing all over again.\nBecause if a human does, he will make mistakes. He will forget to test something and maybe skip over other things. Also,\nmanual tests are boring and take alot of time. 
And if you get more and more tests by the time you’d have to hire more\nand more people which will cost alot of money. Much more than simply scaling your test-cluster up.",[18,29534,29535],{},"But lets think about this… If you are writing unit tests you probably also write acceptance tests. Yes, some tests are\nrather technical and low-level. Therefore they will not be part of the acceptance criteria of a user story given to the\ndevelopers by the product owner. But some of the tests we write are acceptance-tests “by accident”:",[43,29537,29539],{"className":288,"code":29538,"language":290,"meta":48,"style":48},"\n@Test\npublic void addsAvailableItemsToCart() {\n Item item = new Item(\"synyx coffee mug\");\n item.setAvailable(true);\n cart.add(item);\n Assert.assertThat(cart.getItems(), containsItem(item));\n}\n@Test(expected = ItemUnavailableException.class)\npublic void doesNotAddUnavailableItemsToCart() {\n Item item = new Item(\"synyx coffee mug\");\n item.setAvailable(false);\n cart.add(item); // exception expected\n}\n\n",[50,29540,29541,29545,29550,29555,29560,29565,29570,29575,29579,29584,29589,29593,29598,29603],{"__ignoreMap":48},[53,29542,29543],{"class":55,"line":56},[53,29544,500],{"emptyLinePlaceholder":499},[53,29546,29547],{"class":55,"line":86},[53,29548,29549],{},"@Test\n",[53,29551,29552],{"class":55,"line":126},[53,29553,29554],{},"public void addsAvailableItemsToCart() {\n",[53,29556,29557],{"class":55,"line":163},[53,29558,29559],{}," Item item = new Item(\"synyx coffee mug\");\n",[53,29561,29562],{"class":55,"line":186},[53,29563,29564],{}," item.setAvailable(true);\n",[53,29566,29567],{"class":55,"line":221},[53,29568,29569],{}," cart.add(item);\n",[53,29571,29572],{"class":55,"line":242},[53,29573,29574],{}," Assert.assertThat(cart.getItems(), containsItem(item));\n",[53,29576,29577],{"class":55,"line":273},[53,29578,282],{},[53,29580,29581],{"class":55,"line":279},[53,29582,29583],{},"@Test(expected = ItemUnavailableException.class)\n",[53,29585,29586],{"class":55,"line":496},[53,29587,29588],{},"public void doesNotAddUnavailableItemsToCart() {\n",[53,29590,29591],{"class":55,"line":503},[53,29592,29559],{},[53,29594,29595],{"class":55,"line":509},[53,29596,29597],{}," item.setAvailable(false);\n",[53,29599,29600],{"class":55,"line":515},[53,29601,29602],{}," cart.add(item); // exception expected\n",[53,29604,29605],{"class":55,"line":521},[53,29606,282],{},[18,29608,29609,29610,29613],{},"As you can see the two test methods above could be a way to verify the criteria of the story ",[573,29611,29612],{},"“A shop-item can be added\nto the shopping cart”"," I described above.",[18,29615,29616,29617,29620],{},"But – of course – there are other ways to test the same thing. The test above looks like a unit test (our unit is the\nshopping cart implementation ",[50,29618,29619],{},"Cart.java","). We could also test on service-level or through the GUI. Since the mentioned\nexample is kind of trivial there is no need to test it on another level than the unit-level. But some stories have more\ncomplex acceptance criteria and need more complex tests that have to be tested on higher levels. You might also want to\nbe sure that some components work together to complete a task and you want to test these together.",[18,29622,29623,29624,29627,29628,29631],{},"Acceptance criteria should focus on functional requirements and therefore be without technical details. The criteria\nbelongs directly to the user stories and should be created along with the user stories. 
Because requirements or stories\nare usually written by non-technical pepole a common language is needed: Business-People have to be able to write them\nand developers or testers have to understand and implement them. This is why I prefer the BDD-Style of specifig\nacceptance criteria. Here you define usage scenarios of the application / the story. The scenario defines ",[27,29625,29626],{},"what"," the\nuser wants to do, how this breaks down to single ",[27,29629,29630],{},"steps",", what preconditons have to be met and what the expected\noutcome of the actions are. So these criteria are often verbalized in given/when/then form:",[43,29633,29635],{"className":13786,"code":29634,"language":13788,"meta":48,"style":48},"\nGiven I look at the details of the item \"synyx coffee mug\"\nWhen I add the item to the shopping cart\nThen there is a \"synyx coffee mug\" in my Cart.\n\n",[50,29636,29637,29641,29646,29651],{"__ignoreMap":48},[53,29638,29639],{"class":55,"line":56},[53,29640,500],{"emptyLinePlaceholder":499},[53,29642,29643],{"class":55,"line":86},[53,29644,29645],{},"Given I look at the details of the item \"synyx coffee mug\"\n",[53,29647,29648],{"class":55,"line":126},[53,29649,29650],{},"When I add the item to the shopping cart\n",[53,29652,29653],{"class":55,"line":163},[53,29654,29655],{},"Then there is a \"synyx coffee mug\" in my Cart.\n",[18,29657,29658],{},"As you can see I wrote the acceptance criteria based on the GUI my webshop has. I think this is much easier for a\nnon-technical person since noone has to focus on services, components and whatever exists in your webshop application\nbut on what the product owner “knows” and sees (or wants to see) when he is using the application.",[18,29660,29661],{},"This is the time it comes in handy to have a selenium-grid at our service. So in the next few posts we are gonna\ndiscuss how to write tests using selenium to do acceptance testing through the GUI. And – of course – we also gonna\ndiscuss how to turn the BDD-Style criteria into code. So again… stay tuned 🙂",[607,29663,989],{},{"title":48,"searchDepth":86,"depth":86,"links":29665},[29666],{"id":29499,"depth":86,"text":29500},[613],"2013-04-18T07:42:31","The last few blogs about acceptance-testing focused on setting up a nice and scalable infrastructure to do testing\\nthrough the (web)-GUI using a Selenium grid. Since we’ve got this running now we can go on to topics that focus how we\\nwrite these tests. At synyx we try to write our web-tests as “acceptance-tests” so we first take a small dive into\\nwhat that is.","https://synyx.de/blog/what-is-an-acceptance-test/",{},"/blog/what-is-an-acceptance-test",{"title":29487,"description":29496},"blog/what-is-an-acceptance-test",[29676,29677,12448,21474,9417],"acceptance-test","attd","The last few blogs about acceptance-testing focused on setting up a nice and scalable infrastructure to do testing through the (web)-GUI using a Selenium grid. 
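To give a rough idea of where this is heading, here is a minimal, hypothetical sketch of how such a scenario could be wired to Selenium-backed code. The framework choice (Cucumber-JVM-style annotations) and the page object classes `ShopPage` and `CartPage` are assumptions made for this illustration only – they are not taken from our actual setup.

```java
// Hypothetical step definitions for the scenario above. ShopPage and CartPage
// stand for Selenium page objects and are invented for this sketch.
import cucumber.api.java.en.Given;
import cucumber.api.java.en.When;
import cucumber.api.java.en.Then;

import static org.junit.Assert.assertTrue;

public class ShoppingCartSteps {

    private final ShopPage shopPage = new ShopPage();
    private final CartPage cartPage = new CartPage();

    @Given("^I look at the details of the item \"([^\"]*)\"$")
    public void iLookAtTheDetailsOfTheItem(String itemName) {
        shopPage.openDetailsOf(itemName);
    }

    @When("^I add the item to the shopping cart$")
    public void iAddTheItemToTheShoppingCart() {
        shopPage.clickAddToCart();
    }

    @Then("^there is a \"([^\"]*)\" in my Cart$")
    public void thereIsAnItemInMyCart(String itemName) {
        assertTrue(cartPage.containsItem(itemName));
    }
}
```

The step texts stay readable for the product owner, while all Selenium details are hidden behind the page objects.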
Since we’ve got this running…","m_bJhikfuGrMJf27m7nMvsA46csiSIYMdBVJc77JpJI",{"id":29681,"title":29682,"author":29683,"body":29684,"category":29985,"date":29986,"description":29987,"extension":617,"link":29988,"meta":29989,"navigation":499,"path":29990,"seo":29991,"slug":29993,"stem":29994,"tags":29995,"teaser":29996,"__hash__":29997},"blog/blog/asynchronous-concurrency-with-vert-x-part-1.md","Asynchronous concurrency with vert.x – Part 1",[28513],{"type":11,"value":29685,"toc":29981},[29686,29689,29720,29723,29735,29797,29800,29860,29863,29882,29891,29970,29979],[14,29687,29682],{"id":29688},"asynchronous-concurrency-with-vertx-part-1",[18,29690,29691,29692,29697,29698,29701,29702,29705,29706,29709,29710,29713,29714,29719],{},"Event-Driven Concurrency\nAt synyx, we are looking at ",[585,29693,29696],{"href":29694,"rel":29695},"http://www.vertx.io",[589],"vert.x"," for an upcoming project where we are building a system that\nwill need to scale under load. The tag-line of vert.x is ",[573,29699,29700],{},"effortless asynchronous application development for the\nmodern web and enterprise",", which fits the bill, so I decided to play around with it a little bit.\nThe advantage of event-driven concurrency compared to traditional technologies is the reduced risk of deadlocks,\nlivelocks and race conditions. Using mutexes and semaphores correctly is extremely difficult and can lead to very subtle\nbugs that are difficult to reproduce. The downside is that information can only be shared by passing messages.\nAnybody who has has used jQuery’s ",[50,29703,29704],{},"$.ajax"," should have some idea of what event-driven concurrency means: an event loop\ntriggers predefined callbacks after a certain event happens. In that case, the system is retrieving the data in the\nbackground, while your JavaScript program can do something else in the meantime, like respond to user events. Once the\ndata has arrived, the callback method is triggered and the data is passed as a function argument – no other callback\nfunction can run simultaneously. The same is true for ",[50,29707,29708],{},"setTimeout",", which is used extensively for animations: adjust the\nproperties of an element a little bit each call, then return to the event loop.\nThis is the reason why there is no ",[50,29711,29712],{},"sleep()"," function in JavaScript – the browser would freeze, the user couldn’t\ninteract with the web page. Each callback method must be short-running.\nWith ",[585,29715,29718],{"href":29716,"rel":29717},"https://developer.mozilla.org/en-US/docs/DOM/Using_web_workers",[589],"WebWorkers",", you can now also perform client-side\ncomputation without blocking the main event loop, putting your multi-core CPU to use. The mechanism of communication\nbetween the background task and the main task is the same as with doing asynchronous IO – using callbacks and message\npassing.",[18,29721,29722],{},"vert.x\nVert.x brings this concept to the server side on top of the JVM. It allows writing applications using a event-driven\nconcurrency model. There are bindings for basically every language that runs on top of the JVM: Java, Ruby, Groovy,\nPython and JavaScript. The distributed event bus provides seamless scaling over multiple cores or hosts. 
You perform a\ncomputation one process, send data via the event bus to another process where a callback method is executed.",[18,29724,29725,29726,29731,29732,4101],{},"Event bus\nA short example in JavaScript that uses the event bus from\nthe ",[585,29727,29730],{"href":29728,"rel":29729},"https://github.com/vert-x/vert.x/blob/master/vertx-examples/src/main/javascript/eventbus",[589],"vert.x github repository"," –\ndefine a message handler that simply displays messages sent to the event bus address ",[50,29733,29734],{},"example.address",[43,29736,29738],{"className":13786,"code":29737,"language":13788,"meta":48,"style":48},"\n// file handler.js\nload('vertx.js')\nvar eb = vertx.eventBus;\nvar address = 'example.address'\nvar handler = function(message) {\n stdout.println('Received message ' + message)\n}\neb.registerHandler(address, handler);\nfunction vertxStop() {\n eb.unregisterHandler(address, handler);\n}\n\n",[50,29739,29740,29744,29749,29754,29759,29764,29769,29774,29778,29783,29788,29793],{"__ignoreMap":48},[53,29741,29742],{"class":55,"line":56},[53,29743,500],{"emptyLinePlaceholder":499},[53,29745,29746],{"class":55,"line":86},[53,29747,29748],{},"// file handler.js\n",[53,29750,29751],{"class":55,"line":126},[53,29752,29753],{},"load('vertx.js')\n",[53,29755,29756],{"class":55,"line":163},[53,29757,29758],{},"var eb = vertx.eventBus;\n",[53,29760,29761],{"class":55,"line":186},[53,29762,29763],{},"var address = 'example.address'\n",[53,29765,29766],{"class":55,"line":221},[53,29767,29768],{},"var handler = function(message) {\n",[53,29770,29771],{"class":55,"line":242},[53,29772,29773],{}," stdout.println('Received message ' + message)\n",[53,29775,29776],{"class":55,"line":273},[53,29777,282],{},[53,29779,29780],{"class":55,"line":279},[53,29781,29782],{},"eb.registerHandler(address, handler);\n",[53,29784,29785],{"class":55,"line":496},[53,29786,29787],{},"function vertxStop() {\n",[53,29789,29790],{"class":55,"line":503},[53,29791,29792],{}," eb.unregisterHandler(address, handler);\n",[53,29794,29795],{"class":55,"line":509},[53,29796,282],{},[18,29798,29799],{},"We put the program that sends messages in a different file to achieve process isolation:",[43,29801,29803],{"className":13786,"code":29802,"language":13788,"meta":48,"style":48},"\n// file sender.js\nload('vertx.js')\nvar eb = vertx.eventBus;\nvar address = 'example.address'\nvertx.setPeriodic(2000, sendMessage)\nvar count = 0\nfunction sendMessage() {\n var msg = \"some-message-\" + count++;\n eb.send(address, msg);\n stdout.println(\"sent message \" + msg)\n}\n\n",[50,29804,29805,29809,29814,29818,29822,29826,29831,29836,29841,29846,29851,29856],{"__ignoreMap":48},[53,29806,29807],{"class":55,"line":56},[53,29808,500],{"emptyLinePlaceholder":499},[53,29810,29811],{"class":55,"line":86},[53,29812,29813],{},"// file sender.js\n",[53,29815,29816],{"class":55,"line":126},[53,29817,29753],{},[53,29819,29820],{"class":55,"line":163},[53,29821,29758],{},[53,29823,29824],{"class":55,"line":186},[53,29825,29763],{},[53,29827,29828],{"class":55,"line":221},[53,29829,29830],{},"vertx.setPeriodic(2000, sendMessage)\n",[53,29832,29833],{"class":55,"line":242},[53,29834,29835],{},"var count = 0\n",[53,29837,29838],{"class":55,"line":273},[53,29839,29840],{},"function sendMessage() {\n",[53,29842,29843],{"class":55,"line":279},[53,29844,29845],{}," var msg = \"some-message-\" + count++;\n",[53,29847,29848],{"class":55,"line":496},[53,29849,29850],{}," eb.send(address, 
msg);\n",[53,29852,29853],{"class":55,"line":503},[53,29854,29855],{}," stdout.println(\"sent message \" + msg)\n",[53,29857,29858],{"class":55,"line":509},[53,29859,282],{},[18,29861,29862],{},"Both programs are then started separately using the vertx runtime. They can then communicate on the event bus via the\nnetwork:",[43,29864,29866],{"className":13786,"code":29865,"language":13788,"meta":48,"style":48},"\n# vertx run handler.js -cluster -cluster-port 10001 &\n# vertx run sender.js -cluster -cluster-port 10002\n\n",[50,29867,29868,29872,29877],{"__ignoreMap":48},[53,29869,29870],{"class":55,"line":56},[53,29871,500],{"emptyLinePlaceholder":499},[53,29873,29874],{"class":55,"line":86},[53,29875,29876],{},"# vertx run handler.js -cluster -cluster-port 10001 &\n",[53,29878,29879],{"class":55,"line":126},[53,29880,29881],{},"# vertx run sender.js -cluster -cluster-port 10002\n",[18,29883,29884,29885,29890],{},"Repliers\nThis described system can be extended by the use\nof ",[585,29886,29889],{"href":29887,"rel":29888},"http://vertx.io/core_manual_js.html#replying-to-messages",[589],"repliers",", which can be used to start a dialog between a\nmessage handler and a sender. The sender and the replier both live in the same file this time:",[43,29892,29894],{"className":13786,"code":29893,"language":13788,"meta":48,"style":48},"\nvar vertx = require('vertx'); // alternative import\nvar address = \"example.address\";\nvar handler = function (message, replier) {\n stdout.println(\"sender sent \" + message);\n replier(\"pong 1\", function (message, replier) {\n // and so on\n });\n}\nvertx.eventBus.registerHandler(address, handler);\nvertx.eventBus.send(address, \"ping 1\", function (message, replier) {\n stdout.println(\"handler sent \" + message);\n replier(\"ping 2\", function(message, replier) {\n // and so on\n });\n});\n\n",[50,29895,29896,29900,29905,29910,29915,29920,29925,29930,29934,29938,29943,29948,29953,29958,29962,29966],{"__ignoreMap":48},[53,29897,29898],{"class":55,"line":56},[53,29899,500],{"emptyLinePlaceholder":499},[53,29901,29902],{"class":55,"line":86},[53,29903,29904],{},"var vertx = require('vertx'); // alternative import\n",[53,29906,29907],{"class":55,"line":126},[53,29908,29909],{},"var address = \"example.address\";\n",[53,29911,29912],{"class":55,"line":163},[53,29913,29914],{},"var handler = function (message, replier) {\n",[53,29916,29917],{"class":55,"line":186},[53,29918,29919],{}," stdout.println(\"sender sent \" + message);\n",[53,29921,29922],{"class":55,"line":221},[53,29923,29924],{}," replier(\"pong 1\", function (message, replier) {\n",[53,29926,29927],{"class":55,"line":242},[53,29928,29929],{}," // and so on\n",[53,29931,29932],{"class":55,"line":273},[53,29933,7379],{},[53,29935,29936],{"class":55,"line":279},[53,29937,282],{},[53,29939,29940],{"class":55,"line":496},[53,29941,29942],{},"vertx.eventBus.registerHandler(address, handler);\n",[53,29944,29945],{"class":55,"line":503},[53,29946,29947],{},"vertx.eventBus.send(address, \"ping 1\", function (message, replier) {\n",[53,29949,29950],{"class":55,"line":509},[53,29951,29952],{}," stdout.println(\"handler sent \" + message);\n",[53,29954,29955],{"class":55,"line":515},[53,29956,29957],{}," replier(\"ping 2\", function(message, replier) 
{\n",[53,29959,29960],{"class":55,"line":521},[53,29961,29929],{},[53,29963,29964],{"class":55,"line":527},[53,29965,7379],{},[53,29967,29968],{"class":55,"line":533},[53,29969,7422],{},[2207,29971,29973,29974,29978],{"id":29972},"every-sent-message-can-be-acknowledged-with-a-reply-by-the-other-side-and-vice-versa-this-concurrency-model-is-very-easy-to-grasp-and-very-powerful-we-will-use-it-in-the-next-part-of-this-series-where-we-tackle-the-sleeping-barber-problem-stay-tuned","Every sent message can be acknowledged with a reply by the other side and vice versa. This concurrency model is very easy to grasp and very powerful. We will use it in the next part of this series, where we tackle the ",[585,29975,29977],{"href":28675,"rel":29976},[589],"Sleeping barber problem"," – stay tuned!",[607,29980,989],{},{"title":48,"searchDepth":86,"depth":86,"links":29982},[29983],{"id":29972,"depth":86,"text":29984},"Every sent message can be acknowledged with a reply by the other side and vice versa. This concurrency model is very easy to grasp and very powerful. We will use it in the next part of this series, where we tackle the Sleeping barber problem – stay tuned!",[613,996],"2013-04-15T20:59:34","Event-Driven Concurrency\\nAt synyx, we are looking at vert.x for an upcoming project where we are building a system that\\nwill need to scale under load. The tag-line of vert.x is effortless asynchronous application development for the\\nmodern web and enterprise, which fits the bill, so I decided to play around with it a little bit.\\nThe advantage of event-driven concurrency compared to traditional technologies is the reduced risk of deadlocks,\\nlivelocks and race conditions. Using mutexes and semaphores correctly is extremely difficult and can lead to very subtle\\nbugs that are difficult to reproduce. The downside is that information can only be shared by passing messages.\\nAnybody who has has used jQuery’s $.ajax should have some idea of what event-driven concurrency means: an event loop\\ntriggers predefined callbacks after a certain event happens. In that case, the system is retrieving the data in the\\nbackground, while your JavaScript program can do something else in the meantime, like respond to user events. Once the\\ndata has arrived, the callback method is triggered and the data is passed as a function argument – no other callback\\nfunction can run simultaneously. The same is true for setTimeout, which is used extensively for animations: adjust the\\nproperties of an element a little bit each call, then return to the event loop.\\nThis is the reason why there is no sleep() function in JavaScript – the browser would freeze, the user couldn’t\\ninteract with the web page. Each callback method must be short-running.\\nWith WebWorkers, you can now also perform client-side\\ncomputation without blocking the main event loop, putting your multi-core CPU to use. The mechanism of communication\\nbetween the background task and the main task is the same as with doing asynchronous IO – using callbacks and message\\npassing.","https://synyx.de/blog/asynchronous-concurrency-with-vert-x-part-1/",{},"/blog/asynchronous-concurrency-with-vert-x-part-1",{"title":29682,"description":29992},"Event-Driven Concurrency\nAt synyx, we are looking at vert.x for an upcoming project where we are building a system that\nwill need to scale under load. 
The tag-line of vert.x is effortless asynchronous application development for the\nmodern web and enterprise, which fits the bill, so I decided to play around with it a little bit.\nThe advantage of event-driven concurrency compared to traditional technologies is the reduced risk of deadlocks,\nlivelocks and race conditions. Using mutexes and semaphores correctly is extremely difficult and can lead to very subtle\nbugs that are difficult to reproduce. The downside is that information can only be shared by passing messages.\nAnybody who has has used jQuery’s $.ajax should have some idea of what event-driven concurrency means: an event loop\ntriggers predefined callbacks after a certain event happens. In that case, the system is retrieving the data in the\nbackground, while your JavaScript program can do something else in the meantime, like respond to user events. Once the\ndata has arrived, the callback method is triggered and the data is passed as a function argument – no other callback\nfunction can run simultaneously. The same is true for setTimeout, which is used extensively for animations: adjust the\nproperties of an element a little bit each call, then return to the event loop.\nThis is the reason why there is no sleep() function in JavaScript – the browser would freeze, the user couldn’t\ninteract with the web page. Each callback method must be short-running.\nWith WebWorkers, you can now also perform client-side\ncomputation without blocking the main event loop, putting your multi-core CPU to use. The mechanism of communication\nbetween the background task and the main task is the same as with doing asynchronous IO – using callbacks and message\npassing.","asynchronous-concurrency-with-vert-x-part-1","blog/asynchronous-concurrency-with-vert-x-part-1",[16601,7265,23481],"Event-Driven Concurrency At synyx, we are looking at vert.x for an upcoming project where we are building a system that will need to scale under load. 
The tag-line of vert.x…","DkNmAOE6c6bSo2QAVLXKERm9hchogl5z2xWxAovYy3Q",{"id":29999,"title":30000,"author":30001,"body":30002,"category":30633,"date":30634,"description":30635,"extension":617,"link":30636,"meta":30637,"navigation":499,"path":30638,"seo":30639,"slug":30006,"stem":30641,"tags":30642,"teaser":30647,"__hash__":30648},"blog/blog/liquibase-our-setup-in-a-larger-scale-project.md","Liquibase: Our setup in a larger scale project",[25806],{"type":11,"value":30003,"toc":30623},[30004,30007,30016,30020,30023,30046,30049,30060,30064,30068,30071,30074,30257,30260,30263,30267,30270,30370,30374,30377,30435,30444,30447,30457,30461,30464,30467,30539,30542,30545,30590,30593,30596,30600,30603,30612,30615,30618,30621],[14,30005,30000],{"id":30006},"liquibase-our-setup-in-a-larger-scale-project",[18,30008,30009,30010,30015],{},"In this post, we want to show you our ",[585,30011,30014],{"href":30012,"rel":30013},"http://www.liquibase.org/",[589],"Liquibase"," setup in a larger scale project that we’ve\nbeen developing for some time now.",[2207,30017,30019],{"id":30018},"gather-requirements","Gather Requirements",[18,30021,30022],{},"First off, a bit more information about the project and the whole project environment:",[577,30024,30025,30028,30031,30034,30037,30040,30043],{},[580,30026,30027],{},"The software developed in this project consists of different applications",[580,30029,30030],{},"Some applications use the same database, some use different ones",[580,30032,30033],{},"The software runs in multiple branch offices of a company",[580,30035,30036],{},"Not every application runs in every branch office",[580,30038,30039],{},"Some of the data in the databases of the branch offices is the same as in the others, some isn’t",[580,30041,30042],{},"We use maven with profiles to build for the different branch offices, because they need different config",[580,30044,30045],{},"As we took over the project from the company, the applications and also the database already existed for some years",[18,30047,30048],{},"This results in some requirements for our liquibase setup:",[577,30050,30051,30054,30057],{},[580,30052,30053],{},"We need a configuration for each application, because they don’t neccessarily use the same database, and we can’t be\nsure that in every branch office, this setup is the same",[580,30055,30056],{},"We need different configurations for each branch office",[580,30058,30059],{},"We need different liquibase scripts for each branch office for some data, while we need the same scripts for other\ndata",[2207,30061,30063],{"id":30062},"liquibase-setup","Liquibase setup",[649,30065,30067],{"id":30066},"the-pom-file","The pom file",[18,30069,30070],{},"Because we use maven to build our projects, we also want to use it to build and execute the liquibase scripts. Luckily,\nliquibase brings a maven plugin out of the box. So we created a new maven project and added the liquibase maven plugin\nto it. We configured it to run on the install phase of maven, because we want to preprocess the scripts before they are\nexecuted (to fill in the parameters). The scripts and additional config files will be located in the src/main/resources\nfolder of our project.",[18,30072,30073],{},"As it needs a connection to the database, don’t forget to add the needed database driver dependencies! 
Also change the\nliquibase artifact corresponding to your database!",[43,30075,30077],{"className":3792,"code":30076,"language":3794,"meta":48,"style":48},"\u003Cbuild>\n \u003Cplugins>\n \u003Cplugin>\n \u003CgroupId>org.liquibase\u003C/groupId>\n \u003CartifactId>liquibase-maven-plugin\u003C/artifactId>\n \u003Cversion>2.0.3\u003C/version>\n \u003Cconfiguration>\n \u003CmigrationSqlOutputFile>\n ${project.build.directory}/liquibase/migrate-${projectname.dbName}-${projectname.environment}.sql\n \u003C/migrationSqlOutputFile>\n \u003CpropertyFile>target/classes/liquibase-${projectname.environment}.properties\u003C/propertyFile>\n \u003C/configuration>\n \u003Cdependencies>\n \u003Cdependency>\n \u003CgroupId>org.liquibase.ext\u003C/groupId>\n \u003CartifactId>liquibase-oracle\u003C/artifactId>\n \u003Cversion>1.2.0\u003C/version>\n \u003C/dependency>\n \u003C/dependencies>\n \u003Cexecutions>\n \u003Cexecution>\n \u003Cphase>install\u003C/phase>\n \u003Cgoals>\n \u003Cgoal>update\u003C/goal>\n \u003C/goals>\n \u003C/execution>\n \u003C/executions>\n \u003C/plugin>\n \u003C/plugins>\n \u003Cresources>\n \u003Cresource>\n \u003Cdirectory>src/main/resources\u003C/directory>\n \u003Cfiltering>true\u003C/filtering>\n \u003C/resource>\n \u003C/resources>\n\u003C/build>\n",[50,30078,30079,30084,30089,30094,30099,30104,30109,30114,30119,30124,30129,30134,30139,30144,30148,30153,30158,30163,30167,30172,30177,30182,30187,30192,30197,30202,30207,30212,30217,30222,30227,30232,30237,30242,30247,30252],{"__ignoreMap":48},[53,30080,30081],{"class":55,"line":56},[53,30082,30083],{},"\u003Cbuild>\n",[53,30085,30086],{"class":55,"line":86},[53,30087,30088],{}," \u003Cplugins>\n",[53,30090,30091],{"class":55,"line":126},[53,30092,30093],{}," \u003Cplugin>\n",[53,30095,30096],{"class":55,"line":163},[53,30097,30098],{}," \u003CgroupId>org.liquibase\u003C/groupId>\n",[53,30100,30101],{"class":55,"line":186},[53,30102,30103],{}," \u003CartifactId>liquibase-maven-plugin\u003C/artifactId>\n",[53,30105,30106],{"class":55,"line":221},[53,30107,30108],{}," \u003Cversion>2.0.3\u003C/version>\n",[53,30110,30111],{"class":55,"line":242},[53,30112,30113],{}," \u003Cconfiguration>\n",[53,30115,30116],{"class":55,"line":273},[53,30117,30118],{}," \u003CmigrationSqlOutputFile>\n",[53,30120,30121],{"class":55,"line":279},[53,30122,30123],{}," ${project.build.directory}/liquibase/migrate-${projectname.dbName}-${projectname.environment}.sql\n",[53,30125,30126],{"class":55,"line":496},[53,30127,30128],{}," \u003C/migrationSqlOutputFile>\n",[53,30130,30131],{"class":55,"line":503},[53,30132,30133],{}," \u003CpropertyFile>target/classes/liquibase-${projectname.environment}.properties\u003C/propertyFile>\n",[53,30135,30136],{"class":55,"line":509},[53,30137,30138],{}," \u003C/configuration>\n",[53,30140,30141],{"class":55,"line":515},[53,30142,30143],{}," \u003Cdependencies>\n",[53,30145,30146],{"class":55,"line":521},[53,30147,26990],{},[53,30149,30150],{"class":55,"line":527},[53,30151,30152],{}," \u003CgroupId>org.liquibase.ext\u003C/groupId>\n",[53,30154,30155],{"class":55,"line":533},[53,30156,30157],{}," \u003CartifactId>liquibase-oracle\u003C/artifactId>\n",[53,30159,30160],{"class":55,"line":539},[53,30161,30162],{}," \u003Cversion>1.2.0\u003C/version>\n",[53,30164,30165],{"class":55,"line":545},[53,30166,27005],{},[53,30168,30169],{"class":55,"line":2414},[53,30170,30171],{}," \u003C/dependencies>\n",[53,30173,30174],{"class":55,"line":2426},[53,30175,30176],{}," 
\u003Cexecutions>\n",[53,30178,30179],{"class":55,"line":2438},[53,30180,30181],{}," \u003Cexecution>\n",[53,30183,30184],{"class":55,"line":2451},[53,30185,30186],{}," \u003Cphase>install\u003C/phase>\n",[53,30188,30189],{"class":55,"line":2459},[53,30190,30191],{}," \u003Cgoals>\n",[53,30193,30194],{"class":55,"line":2470},[53,30195,30196],{}," \u003Cgoal>update\u003C/goal>\n",[53,30198,30199],{"class":55,"line":2476},[53,30200,30201],{}," \u003C/goals>\n",[53,30203,30204],{"class":55,"line":2484},[53,30205,30206],{}," \u003C/execution>\n",[53,30208,30209],{"class":55,"line":2490},[53,30210,30211],{}," \u003C/executions>\n",[53,30213,30214],{"class":55,"line":2495},[53,30215,30216],{}," \u003C/plugin>\n",[53,30218,30219],{"class":55,"line":2507},[53,30220,30221],{}," \u003C/plugins>\n",[53,30223,30224],{"class":55,"line":2528},[53,30225,30226],{}," \u003Cresources>\n",[53,30228,30229],{"class":55,"line":2539},[53,30230,30231],{}," \u003Cresource>\n",[53,30233,30234],{"class":55,"line":2551},[53,30235,30236],{}," \u003Cdirectory>src/main/resources\u003C/directory>\n",[53,30238,30239],{"class":55,"line":2562},[53,30240,30241],{}," \u003Cfiltering>true\u003C/filtering>\n",[53,30243,30244],{"class":55,"line":2573},[53,30245,30246],{}," \u003C/resource>\n",[53,30248,30249],{"class":55,"line":2585},[53,30250,30251],{}," \u003C/resources>\n",[53,30253,30254],{"class":55,"line":2593},[53,30255,30256],{},"\u003C/build>\n",[18,30258,30259],{},"The resource filtering is needed, because we’ll use placeholders in the liquibase files.",[18,30261,30262],{},"In the configurations, we specify to output the whole sql that is executed by liquibase to be exported to a specific\nfile. Furthermore we use a different configuration file, based on the environment that liquibase is built against. With\nthis, we can specify some configs that are the same for each test server, for each staging server, or for each\nproduction server regardless of the branch, without the need to put it in every maven profile (more on the maven\nprofiles later).",[649,30264,30266],{"id":30265},"the-folder-structure","The folder structure",[18,30268,30269],{},"As for the liquibase folder structure, we set it up as followed:",[43,30271,30273],{"className":13786,"code":30272,"language":13788,"meta":48,"style":48},"src/main/resources/application1/\n├── changes \u003C-- folder for changes\n├── data \u003C-- folder for the initial data imports\n│ ├── all \u003C-- Liqiubase scripts , that are executed\n│ │ │ for every branch\n│ │ ├── csv \u003C-- CSV files for data imports\n│ │ ├── data-xyz-001.xml \u003C-- liquibase scripts\n│ │ └── data-xyz-002.xml\n│ ├── branchX \u003C-- Branch specific folder. contains scripts and\n│ └── branchY ... 
csv files, that are specific for this branch\n├── init \u003C-- folder for database init (executed as SYSDBA)\n├── install \u003C-- folder for some more database initialisation.\n│ that can be executed as the actual user,\n│ table creation and stuff\n├── db.changelog.xml \u003C-- Liquibase Changelog that contains references\n│ to the single liquibase scripts\n└── db.init.xml \u003C-- Initial Liquibase Changelog that\n has to be executed as @SYSDBA@\n and sets up the schemas and users (init folder)\n",[50,30274,30275,30280,30285,30290,30295,30300,30305,30310,30315,30320,30325,30330,30335,30340,30345,30350,30355,30360,30365],{"__ignoreMap":48},[53,30276,30277],{"class":55,"line":56},[53,30278,30279],{},"src/main/resources/application1/\n",[53,30281,30282],{"class":55,"line":86},[53,30283,30284],{},"├── changes \u003C-- folder for changes\n",[53,30286,30287],{"class":55,"line":126},[53,30288,30289],{},"├── data \u003C-- folder for the initial data imports\n",[53,30291,30292],{"class":55,"line":163},[53,30293,30294],{},"│ ├── all \u003C-- Liqiubase scripts , that are executed\n",[53,30296,30297],{"class":55,"line":186},[53,30298,30299],{},"│ │ │ for every branch\n",[53,30301,30302],{"class":55,"line":221},[53,30303,30304],{},"│ │ ├── csv \u003C-- CSV files for data imports\n",[53,30306,30307],{"class":55,"line":242},[53,30308,30309],{},"│ │ ├── data-xyz-001.xml \u003C-- liquibase scripts\n",[53,30311,30312],{"class":55,"line":273},[53,30313,30314],{},"│ │ └── data-xyz-002.xml\n",[53,30316,30317],{"class":55,"line":279},[53,30318,30319],{},"│ ├── branchX \u003C-- Branch specific folder. contains scripts and\n",[53,30321,30322],{"class":55,"line":496},[53,30323,30324],{},"│ └── branchY ... csv files, that are specific for this branch\n",[53,30326,30327],{"class":55,"line":503},[53,30328,30329],{},"├── init \u003C-- folder for database init (executed as SYSDBA)\n",[53,30331,30332],{"class":55,"line":509},[53,30333,30334],{},"├── install \u003C-- folder for some more database initialisation.\n",[53,30336,30337],{"class":55,"line":515},[53,30338,30339],{},"│ that can be executed as the actual user,\n",[53,30341,30342],{"class":55,"line":521},[53,30343,30344],{},"│ table creation and stuff\n",[53,30346,30347],{"class":55,"line":527},[53,30348,30349],{},"├── db.changelog.xml \u003C-- Liquibase Changelog that contains references\n",[53,30351,30352],{"class":55,"line":533},[53,30353,30354],{},"│ to the single liquibase scripts\n",[53,30356,30357],{"class":55,"line":539},[53,30358,30359],{},"└── db.init.xml \u003C-- Initial Liquibase Changelog that\n",[53,30361,30362],{"class":55,"line":545},[53,30363,30364],{}," has to be executed as @SYSDBA@\n",[53,30366,30367],{"class":55,"line":2414},[53,30368,30369],{}," and sets up the schemas and users (init folder)\n",[649,30371,30373],{"id":30372},"the-liquibase-changelog","The liquibase changelog",[18,30375,30376],{},"Here’s an example for the changelog file:",[43,30378,30380],{"className":3792,"code":30379,"language":3794,"meta":48,"style":48},"\u003C?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n\u003CdatabaseChangeLog xmlns=\"http://www.liquibase.org/xml/ns/dbchangelog\"\n xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n xsi:schemaLocation=\"http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-2.0.xsd\">\n \u003Cinclude relativeToChangelogFile=\"true\" file=\"install/all/tables.xml\"/>\n \u003Cinclude relativeToChangelogFile=\"true\" file=\"install/all/procedures.xml\"/>\n ...\n 
\u003Cinclude relativeToChangelogFile=\"true\" file=\"changes/all/table_add_column_xyz.xml\"/>\n \u003Cinclude relativeToChangelogFile=\"true\" file=\"changes/${projectname.branch}/adjust_procedure_asd.xml\"/>\n ...\n\u003C/databaseChangeLog>\n",[50,30381,30382,30387,30392,30397,30402,30407,30412,30416,30421,30426,30430],{"__ignoreMap":48},[53,30383,30384],{"class":55,"line":56},[53,30385,30386],{},"\u003C?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n",[53,30388,30389],{"class":55,"line":86},[53,30390,30391],{},"\u003CdatabaseChangeLog xmlns=\"http://www.liquibase.org/xml/ns/dbchangelog\"\n",[53,30393,30394],{"class":55,"line":126},[53,30395,30396],{}," xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n",[53,30398,30399],{"class":55,"line":163},[53,30400,30401],{}," xsi:schemaLocation=\"http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-2.0.xsd\">\n",[53,30403,30404],{"class":55,"line":186},[53,30405,30406],{}," \u003Cinclude relativeToChangelogFile=\"true\" file=\"install/all/tables.xml\"/>\n",[53,30408,30409],{"class":55,"line":221},[53,30410,30411],{}," \u003Cinclude relativeToChangelogFile=\"true\" file=\"install/all/procedures.xml\"/>\n",[53,30413,30414],{"class":55,"line":242},[53,30415,322],{},[53,30417,30418],{"class":55,"line":273},[53,30419,30420],{}," \u003Cinclude relativeToChangelogFile=\"true\" file=\"changes/all/table_add_column_xyz.xml\"/>\n",[53,30422,30423],{"class":55,"line":279},[53,30424,30425],{}," \u003Cinclude relativeToChangelogFile=\"true\" file=\"changes/${projectname.branch}/adjust_procedure_asd.xml\"/>\n",[53,30427,30428],{"class":55,"line":496},[53,30429,322],{},[53,30431,30432],{"class":55,"line":503},[53,30433,30434],{},"\u003C/databaseChangeLog>\n",[18,30436,30437,30438,30443],{},"As you can see, we have used the ",[27,30439,30440],{},[573,30441,30442],{},"${projectname.branch}"," placeholder in the path of a changelog. The file that is\nreferenced there, has to be added for each of the branches, because this changelog is also used for every branch. This\ncan be somewhat inconvenient in some times, when you only have to add a change to one of the branches, but that should\nnot happen that often. It’s more likely (at least for our case) that you have to adjust the same thing for all branches,\nbut a little differnt, or fill some table with different data.",[18,30445,30446],{},"Also, the right execution order of the scripts is secured this way. Furthermore, we don’t have to create and update one\nchangelog for every branch, where it can easily happen, that one file is left out and it goes through unnoticed. In our\nsetup, if you forget to add a file that’s declared in the changelog, that’s another case, because you will know it as\nsoon as you execute the script for the specific branch. So we considered this to be the best method to address multiple\nbranches.",[18,30448,30449,30450,30453,30454,30456],{},"You can also use the placeholder in other places, like the ",[573,30451,30452],{},"loadUpdateData"," tag, where you can specify a .csv file from\nwhich liquibase will load data. There, You’ll only need to add the changelog to the ‘",[573,30455,19024],{},"‘ folder and the .csv files in\neach branch folder. Furthermore, we are",[649,30458,30460],{"id":30459},"maven-profiles","maven profiles",[18,30462,30463],{},"To configure and execute liquibase, we use different maven profiles. We need to specify the url, username and password\nfor each server, so we have one profile for each of them. 
The properties that are the same based on the environment (\ntest, stage, prod), are defined in a config file included from the pom (as already seen above), so we also need to add a\nproperty for the environment in each profile. Like this we can create a liquibase profile for each application of an\nenvironment of a branch (yup, there are quite some profiles because of this, but it is simply needed – you don’t have to\nkeep them in your settings.xml all the time, though, so it isn’t that much of a pain, once they are created 😛 ). By\nsetting the username and password locally in the maven settings.xml, we also keep sure that no passwords are commited in\nour version control.",[18,30465,30466],{},"example profile:",[43,30468,30470],{"className":3792,"code":30469,"language":3794,"meta":48,"style":48},"\n \u003Cprofile>\n \u003Cid>xyz-test\u003C/id>\n \u003Cproperties>\n \u003Cprojectname.branch>xyz\u003C/projectname.branch>\n \u003Cprojectname.environment>test\u003C/projectname.environment>\n \u003Cprojectname.dbName>dbname\u003C/projectname.dbName>\n \u003Cprojectname.liquibase.url>jdbc:oracle:thin:@192.168.224.234:1521:DBID\u003C/projectname.liquibase.url>\n \u003Cprojectname.liquibase.username>username\u003C/projectname.liquibase.username>\n \u003Cprojectname.liquibase.password>password\u003C/projectname.liquibase.password>\n \u003Cprojectname.liquibase.schemaName>schema\u003C/projectname.liquibase.schemaName>\n \u003Cprojectname.liquibase.changeLogFile>target/classes/path/to/changelog/db.changelog.xml\u003C/projectname.liquibase.changeLogFile>\n \u003C/properties>\n \u003C/profile>\n",[50,30471,30472,30476,30481,30486,30490,30495,30500,30505,30510,30515,30520,30525,30530,30534],{"__ignoreMap":48},[53,30473,30474],{"class":55,"line":56},[53,30475,500],{"emptyLinePlaceholder":499},[53,30477,30478],{"class":55,"line":86},[53,30479,30480],{}," \u003Cprofile>\n",[53,30482,30483],{"class":55,"line":126},[53,30484,30485],{}," \u003Cid>xyz-test\u003C/id>\n",[53,30487,30488],{"class":55,"line":163},[53,30489,27058],{},[53,30491,30492],{"class":55,"line":186},[53,30493,30494],{}," \u003Cprojectname.branch>xyz\u003C/projectname.branch>\n",[53,30496,30497],{"class":55,"line":221},[53,30498,30499],{}," \u003Cprojectname.environment>test\u003C/projectname.environment>\n",[53,30501,30502],{"class":55,"line":242},[53,30503,30504],{}," \u003Cprojectname.dbName>dbname\u003C/projectname.dbName>\n",[53,30506,30507],{"class":55,"line":273},[53,30508,30509],{}," \u003Cprojectname.liquibase.url>jdbc:oracle:thin:@192.168.224.234:1521:DBID\u003C/projectname.liquibase.url>\n",[53,30511,30512],{"class":55,"line":279},[53,30513,30514],{}," \u003Cprojectname.liquibase.username>username\u003C/projectname.liquibase.username>\n",[53,30516,30517],{"class":55,"line":496},[53,30518,30519],{}," \u003Cprojectname.liquibase.password>password\u003C/projectname.liquibase.password>\n",[53,30521,30522],{"class":55,"line":503},[53,30523,30524],{}," \u003Cprojectname.liquibase.schemaName>schema\u003C/projectname.liquibase.schemaName>\n",[53,30526,30527],{"class":55,"line":509},[53,30528,30529],{}," \u003Cprojectname.liquibase.changeLogFile>target/classes/path/to/changelog/db.changelog.xml\u003C/projectname.liquibase.changeLogFile>\n",[53,30531,30532],{"class":55,"line":515},[53,30533,27073],{},[53,30535,30536],{"class":55,"line":521},[53,30537,30538],{}," \u003C/profile>\n",[18,30540,30541],{},"With this config, it uses the property file target/classes/liquibase-test.properties (keep in mind, the file initially\nlies in the folder 
src/main/resources, but because we build the project before we execute liquibase, it is then located\nunder target/classes/ , with its parameters replaced by our properties).",[18,30543,30544],{},"liquibase-test.properties:",[43,30546,30548],{"className":13786,"code":30547,"language":13788,"meta":48,"style":48},"changeLogFile=${projectname.liquibase.changeLogFile}\ndriver=oracle.jdbc.OracleDriver\nurl=${projectname.liquibase.url}\nusername=${projectname.liquibase.username}\npassword=${projectname.liquibase.password}\ndefaultSchemaName=${projectname.liquibase.schemaName}\nverbose=true\ndropFirst=false\n",[50,30549,30550,30555,30560,30565,30570,30575,30580,30585],{"__ignoreMap":48},[53,30551,30552],{"class":55,"line":56},[53,30553,30554],{},"changeLogFile=${projectname.liquibase.changeLogFile}\n",[53,30556,30557],{"class":55,"line":86},[53,30558,30559],{},"driver=oracle.jdbc.OracleDriver\n",[53,30561,30562],{"class":55,"line":126},[53,30563,30564],{},"url=${projectname.liquibase.url}\n",[53,30566,30567],{"class":55,"line":163},[53,30568,30569],{},"username=${projectname.liquibase.username}\n",[53,30571,30572],{"class":55,"line":186},[53,30573,30574],{},"password=${projectname.liquibase.password}\n",[53,30576,30577],{"class":55,"line":221},[53,30578,30579],{},"defaultSchemaName=${projectname.liquibase.schemaName}\n",[53,30581,30582],{"class":55,"line":242},[53,30583,30584],{},"verbose=true\n",[53,30586,30587],{"class":55,"line":273},[53,30588,30589],{},"dropFirst=false\n",[18,30591,30592],{},"Here we map our properties from the profiles to the actual liquibase property names and also set a few other liquibase\nconfigs.",[18,30594,30595],{},"For scripts you need to execute in another schema as the one the db user has set as the default schema, we also set the\ndefaultSchemaName property of liquibase (mainly the case, if we execute scripts as the SYSDBA user).",[2207,30597,30599],{"id":30598},"execution-conclusion","Execution & Conclusion",[18,30601,30602],{},"Because of the use of maven, we can execute all of the changes from our local machines very easy:",[43,30604,30606],{"className":13786,"code":30605,"language":13788,"meta":48,"style":48},"mvn clean install -Pxyz-test\n",[50,30607,30608],{"__ignoreMap":48},[53,30609,30610],{"class":55,"line":56},[53,30611,30605],{},[18,30613,30614],{},"If you connect against a remote server, you are even warned with a dialogue that contains the database name, url and\nusername, it wants to execute the scripts on, before the scripts are actually executed. So you can check them again and\nabort the migration if you used the wrong profile.",[18,30616,30617],{},"With this setup we can now add scripts for only one branch, multiple branches, or all branches, without having to worry\nto forget to add one change to a branch and leaving the error unnoticed. Even if we forget to put some file in the\nfolder of one branch, our changelog file is global for all branches! So if we try to execute it the next time, liquibase\nnotices the missing file and informs us about this (and aborts the execution). 
And because we don’t have different\nfolders for the environments, but only the branches, this gets noticed on the test machines.",[18,30619,30620],{},"Please let us know what you think of our approach and if you know an even better one!",[607,30622,989],{},{"title":48,"searchDepth":86,"depth":86,"links":30624},[30625,30626,30632],{"id":30018,"depth":86,"text":30019},{"id":30062,"depth":86,"text":30063,"children":30627},[30628,30629,30630,30631],{"id":30066,"depth":126,"text":30067},{"id":30265,"depth":126,"text":30266},{"id":30372,"depth":126,"text":30373},{"id":30459,"depth":126,"text":30460},{"id":30598,"depth":86,"text":30599},[613],"2013-04-12T11:19:55","In this post, we want to show you our Liquibase setup in a larger scale project that we’ve\\nbeen developing for some time now.","https://synyx.de/blog/liquibase-our-setup-in-a-larger-scale-project/",{},"/blog/liquibase-our-setup-in-a-larger-scale-project",{"title":30000,"description":30640},"In this post, we want to show you our Liquibase setup in a larger scale project that we’ve\nbeen developing for some time now.","blog/liquibase-our-setup-in-a-larger-scale-project",[30643,30644,30645,30646],"database","database-change-management","database-migration","liquibase","In this post, we want to show you our Liquibase setup in a larger scale project that we’ve been developing for some time now. Gather Requirements First off, a bit…","Edhd0rsE8wWORJZowAN7nLiXbUivWvpaqbjev0vh3MQ",{"id":30650,"title":30651,"author":30652,"body":30654,"category":32771,"date":32773,"description":48,"extension":617,"link":32774,"meta":32775,"navigation":499,"path":32776,"seo":32777,"slug":30658,"stem":32778,"tags":32779,"teaser":32783,"__hash__":32784},"blog/blog/continuous-deployment-automatic-backup-script.md","Continuous Deployment – Automatic Backup Script",[30653],"buchloh",{"type":11,"value":30655,"toc":32753},[30656,30659,30663,30678,30682,30691,30700,30703,30706,30717,30724,30728,30749,30752,30787,30790,30799,30852,30866,30872,30886,30890,30893,30913,31111,31115,31118,31122,31125,31139,31146,31256,31260,31263,31580,31584,31587,31796,31800,31806,31815,31818,31852,31855,31864,31867,32021,32025,32028,32039,32049,32291,32295,32301,32318,32335,32349,32521,32525,32528,32547,32701,32712,32716,32725,32729,32732,32736,32743,32750],[14,30657,30651],{"id":30658},"continuous-deployment-automatic-backup-script",[2207,30660,30662],{"id":30661},"a-few-words-about-continuous-deployment","A few words about Continuous Deployment",[11259,30664,30665,30668,30671],{},[18,30666,30667],{},"Continuous Deployment is the deployment or release of code to Production as soon as it is ready. 
(…) The automated\nprocess is key because it should be able to be performed by anyone in a matter of minutes (preferably by the press of a\nbutton).",[18,30669,30670],{},"Once you have moved to a Continuous Deployment process, you will have to have several pieces of automation in place.\nYou must automate your Continuous Integration Build Server and Continuous Delivery to Staging, as well as have the\nability to automatically deploy to Production.",[18,30672,30673],{},[585,30674,30677],{"href":30675,"rel":30676,"title":15096},"http://blog.assembla.com/assemblablog/tabid/12618/bid/92411/Continuous-Delivery-vs-Continuous-Deployment-vs-Continuous-Integration-Wait-huh.aspx",[589],"Read full blog post",[2207,30679,30681],{"id":30680},"were-on-the-way","…we’re on the way",[18,30683,30684,30685,30690],{},"Because every manual step implies a risk for failure, our goal is to minimize such risks as well as our amount of work\nby automating our processes of software delivery. In several of our applications we already use a deployment script for\nautomatic deployment of ",[585,30686,30689],{"href":30687,"rel":30688,"title":30689},"http://tomcat.apache.org/",[589],"Tomcat"," applications.",[18,30692,30693,30694,30699],{},"For some applications we even use a continuous deployment script triggered by crontab (e.g. every hour) to check\nif ",[585,30695,30698],{"href":30696,"rel":30697,"title":30698},"http://www.sonatype.org/nexus/",[589],"Nexus"," has a new version of the application and if so to fetch and deploy it\nautomatically.",[18,30701,30702],{},"However the backup process was still manual – although you always perform the same steps. This is a perfect opportunity\nfor automation. And a perfect opportunity for me to leave the Java world for a while and to learn more about shell\nscripting.",[18,30704,30705],{},"The typical backup steps before a deployment are:",[577,30707,30708,30711,30714],{},[580,30709,30710],{},"saving war file resp. information about the current deployed version",[580,30712,30713],{},"saving database data in a dump",[580,30715,30716],{},"saving directories and/or files, e.g. generated error reports",[18,30718,30719,30720,986],{},"This blog post explains the configuration and the most important steps of the backup script. You can find the entire\nbackup script and the configuration files on ",[585,30721,15107],{"href":30722,"rel":30723,"title":15107},"https://github.com/murygina/automated-backup",[589],[2207,30725,30727],{"id":30726},"configuration-for-mysqldump","Configuration for mysqldump",[18,30729,10847,30730,30733,30734,30737,30738,30741,30742,30745,30746,30748],{},[50,30731,30732],{},"[mysqldump](http://dev.mysql.com/doc/refman/5.1/de/mysqldump.html \"mysqldump\")"," client is the usual command line\ntool to dump a database for backup or for transferring the database data. The created dump contains SQL statements to\ncreate and/or to populate table(s). To be able to execute ",[50,30735,30736],{},"mysqldump"," in an automatic process with nobody around to\nenter the password, you have to provide the access data in a file. Create a ",[50,30739,30740],{},".ini","-style file named ",[50,30743,30744],{},".my.cnf"," providing\nuser and password information. 
Later in the script, ",[50,30747,30736],{}," will read this configuration and will automatically use\nthe supplied user name and password.",[18,30750,30751],{},"If you have only one database to be dumped, following lines are enough:",[43,30753,30757],{"className":30754,"code":30755,"language":30756,"meta":48,"style":48},"language-shell shiki shiki-themes github-light github-dark","\n[mysqldump]\n user = myUser\n password = xxx\n\n","shell",[50,30758,30759,30763,30768,30777],{"__ignoreMap":48},[53,30760,30761],{"class":55,"line":56},[53,30762,500],{"emptyLinePlaceholder":499},[53,30764,30765],{"class":55,"line":86},[53,30766,30767],{"class":82},"[mysqldump]\n",[53,30769,30770,30772,30774],{"class":55,"line":126},[53,30771,180],{"class":59},[53,30773,1245],{"class":63},[53,30775,30776],{"class":63}," myUser\n",[53,30778,30779,30782,30784],{"class":55,"line":163},[53,30780,30781],{"class":59}," password",[53,30783,1245],{"class":63},[53,30785,30786],{"class":63}," xxx\n",[18,30788,30789],{},"If you have more than one database to be dumped needing different access data, you have to specify an access data\nsection for every database.",[18,30791,30792,30793,11792,30796,986],{},"For example, if you want the databases “foo” and “bar” to be dumped, you’ll need two sections with the corresponding\ndatabase name as suffix: ",[50,30794,30795],{},"[mysqldumpfoo]",[50,30797,30798],{},"[mysqldumpbar]",[43,30800,30802],{"className":30754,"code":30801,"language":30756,"meta":48,"style":48},"\n[mysqldumpfoo]\n user = fooUser\n password = xxx\n[mysqldumpbar]\n user = barUser\n password = xxx\n\n",[50,30803,30804,30808,30813,30822,30830,30835,30844],{"__ignoreMap":48},[53,30805,30806],{"class":55,"line":56},[53,30807,500],{"emptyLinePlaceholder":499},[53,30809,30810],{"class":55,"line":86},[53,30811,30812],{"class":82},"[mysqldumpfoo]\n",[53,30814,30815,30817,30819],{"class":55,"line":126},[53,30816,180],{"class":59},[53,30818,1245],{"class":63},[53,30820,30821],{"class":63}," fooUser\n",[53,30823,30824,30826,30828],{"class":55,"line":163},[53,30825,30781],{"class":59},[53,30827,1245],{"class":63},[53,30829,30786],{"class":63},[53,30831,30832],{"class":55,"line":186},[53,30833,30834],{"class":82},"[mysqldumpbar]\n",[53,30836,30837,30839,30841],{"class":55,"line":221},[53,30838,180],{"class":59},[53,30840,1245],{"class":63},[53,30842,30843],{"class":63}," barUser\n",[53,30845,30846,30848,30850],{"class":55,"line":242},[53,30847,30781],{"class":59},[53,30849,1245],{"class":63},[53,30851,30786],{"class":63},[18,30853,30854,30857,30858,30861,30862,30865],{},[27,30855,30856],{},"Caution:"," In this case there ",[27,30859,30860],{},"must not"," be a ",[50,30863,30864],{},"[mysqldump]"," section since the more specific sections with database\nname as suffix would be ignored.",[18,30867,30868,30869,30871],{},"If you want to prevent other users from reading your ",[50,30870,30744],{},", change the file permission so that only the owner has\nfull read and write permissions.",[43,30873,30875],{"className":13786,"code":30874,"language":13788,"meta":48,"style":48},"\nchmod 600 ~/.my.cnf\n\n",[50,30876,30877,30881],{"__ignoreMap":48},[53,30878,30879],{"class":55,"line":56},[53,30880,500],{"emptyLinePlaceholder":499},[53,30882,30883],{"class":55,"line":86},[53,30884,30885],{},"chmod 600 ~/.my.cnf\n",[2207,30887,30889],{"id":30888},"configuration-for-backup-script","Configuration for Backup script",[18,30891,30892],{},"The backup script will depend on a configuration file containing details for the backup 
process:",[577,30894,30895,30898,30901,30904,30907,30910],{},[580,30896,30897],{},"the absolute path of the deployed webapp",[580,30899,30900],{},"the absolute path of the parent directory where the backups should be stored",[580,30902,30903],{},"the names of the databases to be dumped",[580,30905,30906],{},"the absolute path of the mysqldump config file with access data",[580,30908,30909],{},"if the deployed war file itself or if only the information about the current deployed version should be saved",[580,30911,30912],{},"if there are files and/or directories that should be moved or copied to the backup directory",[43,30914,30916],{"className":30754,"code":30915,"language":30756,"meta":48,"style":48},"\n# absolute path of the deployed webapp\nWEBAPP_PATH=\"$HOME/tomcat/webapps/ROOT\"\n# absolute path where the backups should be stored\nBACKUP_PATH=\"$HOME/backup\"\n# names of the databases to be dumped\n# separate with whitespace\nDB_NAME=\"db1 db2 db3\"\n# absolute path of the mysql config file with user data\n# for the above specified databases\nMYSQL_CONF=\"$HOME/.my.cnf\"\n# decide if current deployed war should be saved in backup dir\n# or if only the information about the version that is deployed\n# should be saved\n#\n# 0: save war only if the current deployed version is a snapshot\n# 1: always save war\n#\n# if you specify nothing or something that is not 0 or 1,\n# current deployed war file won't be saved\nWAR=1\n# specifiy folders and/or files (absolute path!)\n# that should be copied resp. moved into backup dir\n# separate with whitespace\nCONTENT_TO_BE_COPIED=\"$HOME/file $HOME/folder\"\nCONTENT_TO_BE_MOVED=\"$HOME/folder/file $HOME/folder/*\"\n\n",[50,30917,30918,30922,30927,30942,30947,30961,30966,30971,30981,30986,30991,31005,31010,31015,31020,31025,31030,31035,31039,31044,31049,31059,31064,31069,31073,31092],{"__ignoreMap":48},[53,30919,30920],{"class":55,"line":56},[53,30921,500],{"emptyLinePlaceholder":499},[53,30923,30924],{"class":55,"line":86},[53,30925,30926],{"class":3698},"# absolute path of the deployed webapp\n",[53,30928,30929,30932,30934,30936,30939],{"class":55,"line":126},[53,30930,30931],{"class":82},"WEBAPP_PATH",[53,30933,390],{"class":389},[53,30935,2385],{"class":63},[53,30937,30938],{"class":82},"$HOME",[53,30940,30941],{"class":63},"/tomcat/webapps/ROOT\"\n",[53,30943,30944],{"class":55,"line":163},[53,30945,30946],{"class":3698},"# absolute path where the backups should be stored\n",[53,30948,30949,30952,30954,30956,30958],{"class":55,"line":186},[53,30950,30951],{"class":82},"BACKUP_PATH",[53,30953,390],{"class":389},[53,30955,2385],{"class":63},[53,30957,30938],{"class":82},[53,30959,30960],{"class":63},"/backup\"\n",[53,30962,30963],{"class":55,"line":221},[53,30964,30965],{"class":3698},"# names of the databases to be dumped\n",[53,30967,30968],{"class":55,"line":242},[53,30969,30970],{"class":3698},"# separate with whitespace\n",[53,30972,30973,30976,30978],{"class":55,"line":273},[53,30974,30975],{"class":82},"DB_NAME",[53,30977,390],{"class":389},[53,30979,30980],{"class":63},"\"db1 db2 db3\"\n",[53,30982,30983],{"class":55,"line":279},[53,30984,30985],{"class":3698},"# absolute path of the mysql config file with user data\n",[53,30987,30988],{"class":55,"line":496},[53,30989,30990],{"class":3698},"# for the above specified 
databases\n",[53,30992,30993,30996,30998,31000,31002],{"class":55,"line":503},[53,30994,30995],{"class":82},"MYSQL_CONF",[53,30997,390],{"class":389},[53,30999,2385],{"class":63},[53,31001,30938],{"class":82},[53,31003,31004],{"class":63},"/.my.cnf\"\n",[53,31006,31007],{"class":55,"line":509},[53,31008,31009],{"class":3698},"# decide if current deployed war should be saved in backup dir\n",[53,31011,31012],{"class":55,"line":515},[53,31013,31014],{"class":3698},"# or if only the information about the version that is deployed\n",[53,31016,31017],{"class":55,"line":521},[53,31018,31019],{"class":3698},"# should be saved\n",[53,31021,31022],{"class":55,"line":527},[53,31023,31024],{"class":3698},"#\n",[53,31026,31027],{"class":55,"line":533},[53,31028,31029],{"class":3698},"# 0: save war only if the current deployed version is a snapshot\n",[53,31031,31032],{"class":55,"line":539},[53,31033,31034],{"class":3698},"# 1: always save war\n",[53,31036,31037],{"class":55,"line":545},[53,31038,31024],{"class":3698},[53,31040,31041],{"class":55,"line":2414},[53,31042,31043],{"class":3698},"# if you specify nothing or something that is not 0 or 1,\n",[53,31045,31046],{"class":55,"line":2426},[53,31047,31048],{"class":3698},"# current deployed war file won't be saved\n",[53,31050,31051,31054,31056],{"class":55,"line":2438},[53,31052,31053],{"class":82},"WAR",[53,31055,390],{"class":389},[53,31057,31058],{"class":63},"1\n",[53,31060,31061],{"class":55,"line":2451},[53,31062,31063],{"class":3698},"# specifiy folders and/or files (absolute path!)\n",[53,31065,31066],{"class":55,"line":2459},[53,31067,31068],{"class":3698},"# that should be copied resp. moved into backup dir\n",[53,31070,31071],{"class":55,"line":2470},[53,31072,30970],{"class":3698},[53,31074,31075,31078,31080,31082,31084,31087,31089],{"class":55,"line":2476},[53,31076,31077],{"class":82},"CONTENT_TO_BE_COPIED",[53,31079,390],{"class":389},[53,31081,2385],{"class":63},[53,31083,30938],{"class":82},[53,31085,31086],{"class":63},"/file ",[53,31088,30938],{"class":82},[53,31090,31091],{"class":63},"/folder\"\n",[53,31093,31094,31097,31099,31101,31103,31106,31108],{"class":55,"line":2484},[53,31095,31096],{"class":82},"CONTENT_TO_BE_MOVED",[53,31098,390],{"class":389},[53,31100,2385],{"class":63},[53,31102,30938],{"class":82},[53,31104,31105],{"class":63},"/folder/file ",[53,31107,30938],{"class":82},[53,31109,31110],{"class":63},"/folder/*\"\n",[2207,31112,31114],{"id":31113},"the-backup-script","The backup script",[18,31116,31117],{},"Now let’s have a look at the actual backup script and the most important steps of it.",[649,31119,31121],{"id":31120},"get-configuration-file","Get configuration file",[18,31123,31124],{},"You can call the script with the option -c to specify which configuration file should be used.",[43,31126,31128],{"className":13786,"code":31127,"language":13788,"meta":48,"style":48},"\n./backup.sh -c config/custom.conf\n\n",[50,31129,31130,31134],{"__ignoreMap":48},[53,31131,31132],{"class":55,"line":56},[53,31133,500],{"emptyLinePlaceholder":499},[53,31135,31136],{"class":55,"line":86},[53,31137,31138],{},"./backup.sh -c config/custom.conf\n",[18,31140,31141,31142,31145],{},"If you call the script without the -c option, the script tries to get a file named ",[50,31143,31144],{},"backup.conf"," in the directory the\nscript is located.",[43,31147,31149],{"className":30754,"code":31148,"language":30756,"meta":48,"style":48},"\n# default config file\nCONF_FILE=\"$(dirname \"$0\")/backup.conf\"\n# a colon after the option 
means that the option\n# expects an argument\nwhile getopts \"hc:\" OPTION; do\ncase \"$OPTION\" in\nc)\nCONF_FILE=$OPTARG\n;;\nesac\ndone\n\n",[50,31150,31151,31155,31160,31181,31186,31191,31211,31226,31233,31242,31246,31251],{"__ignoreMap":48},[53,31152,31153],{"class":55,"line":56},[53,31154,500],{"emptyLinePlaceholder":499},[53,31156,31157],{"class":55,"line":86},[53,31158,31159],{"class":3698},"# default config file\n",[53,31161,31162,31165,31167,31170,31173,31175,31178],{"class":55,"line":126},[53,31163,31164],{"class":82},"CONF_FILE",[53,31166,390],{"class":389},[53,31168,31169],{"class":63},"\"$(",[53,31171,31172],{"class":59},"dirname",[53,31174,6452],{"class":63},[53,31176,31177],{"class":89},"$0",[53,31179,31180],{"class":63},"\")/backup.conf\"\n",[53,31182,31183],{"class":55,"line":163},[53,31184,31185],{"class":3698},"# a colon after the option means that the option\n",[53,31187,31188],{"class":55,"line":186},[53,31189,31190],{"class":3698},"# expects an argument\n",[53,31192,31193,31196,31199,31202,31205,31208],{"class":55,"line":221},[53,31194,31195],{"class":389},"while",[53,31197,31198],{"class":89}," getopts",[53,31200,31201],{"class":63}," \"hc:\"",[53,31203,31204],{"class":63}," OPTION",[53,31206,31207],{"class":82},"; ",[53,31209,31210],{"class":389},"do\n",[53,31212,31213,31216,31218,31221,31223],{"class":55,"line":242},[53,31214,31215],{"class":389},"case",[53,31217,6452],{"class":63},[53,31219,31220],{"class":82},"$OPTION",[53,31222,2385],{"class":63},[53,31224,31225],{"class":389}," in\n",[53,31227,31228,31231],{"class":55,"line":273},[53,31229,13421],{"class":31230},"sA_wV",[53,31232,685],{"class":389},[53,31234,31235,31237,31239],{"class":55,"line":279},[53,31236,31164],{"class":82},[53,31238,390],{"class":389},[53,31240,31241],{"class":82},"$OPTARG\n",[53,31243,31244],{"class":55,"line":496},[53,31245,12031],{"class":82},[53,31247,31248],{"class":55,"line":503},[53,31249,31250],{"class":389},"esac\n",[53,31252,31253],{"class":55,"line":509},[53,31254,31255],{"class":389},"done\n",[649,31257,31259],{"id":31258},"read-and-validate-configuration-file","Read and validate configuration file",[18,31261,31262],{},"The script makes sure that the configuration file exists and contains all the required parameters. Otherwise it will\nfail with an appropriate error message.",[43,31264,31266],{"className":30754,"code":31265,"language":30756,"meta":48,"style":48},"\n# variable is empty\nif [ -z \"$CONF_FILE\" ]; then\n echo \"CONF_FILE not set\"\n exit 1\nfi\n# no file found for the given variable\nif [ ! -f \"$CONF_FILE\" ]; then\n echo \"No configuration file found\"\n exit 1\nfi\n# read in variables of conf file\n. \"$CONF_FILE\"\n# make sure that conf file contains all required variables\nif [ -z \"$WEBAPP_PATH\" ] || [ -z \"$BACKUP_PATH\" ] || [ -z \"$DB_NAME\" ] || [ -z \"$MYSQL_CONF\" ]; then\n echo \"Configuration file seems to be incorrect: required variables missing. Please check your config file: $(readlink -f \"$CONF_FILE\")\"\n exit 1\nfi\n# make sure that WEBAPP_PATH exists\nif [ ! -d \"$WEBAPP_PATH\" ]; then\n echo \"The given webapp path doesn't exist. Please check your config file: $(readlink -f \"$CONF_FILE\")\"\n exit 1\nfi\n# make sure that mysql config file exists\nif [ ! -f \"$MYSQL_CONF\" ]; then\n echo \"The given mysql config file doesn't exist. 
Please check your config file: $(readlink -f \"$CONF_FILE\")\"\n exit 1\nfi\n\n",[50,31267,31268,31272,31277,31298,31306,31313,31317,31322,31343,31350,31356,31360,31365,31376,31381,31447,31466,31472,31476,31481,31501,31518,31524,31528,31533,31553,31570,31576],{"__ignoreMap":48},[53,31269,31270],{"class":55,"line":56},[53,31271,500],{"emptyLinePlaceholder":499},[53,31273,31274],{"class":55,"line":86},[53,31275,31276],{"class":3698},"# variable is empty\n",[53,31278,31279,31281,31283,31286,31288,31291,31293,31296],{"class":55,"line":126},[53,31280,4334],{"class":389},[53,31282,4104],{"class":82},[53,31284,31285],{"class":389},"-z",[53,31287,6452],{"class":63},[53,31289,31290],{"class":82},"$CONF_FILE",[53,31292,2385],{"class":63},[53,31294,31295],{"class":82}," ]; ",[53,31297,6467],{"class":389},[53,31299,31300,31303],{"class":55,"line":163},[53,31301,31302],{"class":89}," echo",[53,31304,31305],{"class":63}," \"CONF_FILE not set\"\n",[53,31307,31308,31311],{"class":55,"line":186},[53,31309,31310],{"class":89}," exit",[53,31312,4368],{"class":89},[53,31314,31315],{"class":55,"line":221},[53,31316,4373],{"class":389},[53,31318,31319],{"class":55,"line":242},[53,31320,31321],{"class":3698},"# no file found for the given variable\n",[53,31323,31324,31326,31328,31330,31333,31335,31337,31339,31341],{"class":55,"line":273},[53,31325,4334],{"class":389},[53,31327,4104],{"class":82},[53,31329,11914],{"class":389},[53,31331,31332],{"class":389}," -f",[53,31334,6452],{"class":63},[53,31336,31290],{"class":82},[53,31338,2385],{"class":63},[53,31340,31295],{"class":82},[53,31342,6467],{"class":389},[53,31344,31345,31347],{"class":55,"line":279},[53,31346,31302],{"class":89},[53,31348,31349],{"class":63}," \"No configuration file found\"\n",[53,31351,31352,31354],{"class":55,"line":496},[53,31353,31310],{"class":89},[53,31355,4368],{"class":89},[53,31357,31358],{"class":55,"line":503},[53,31359,4373],{"class":389},[53,31361,31362],{"class":55,"line":509},[53,31363,31364],{"class":3698},"# read in variables of conf file\n",[53,31366,31367,31369,31371,31373],{"class":55,"line":515},[53,31368,986],{"class":89},[53,31370,6452],{"class":63},[53,31372,31290],{"class":82},[53,31374,31375],{"class":63},"\"\n",[53,31377,31378],{"class":55,"line":521},[53,31379,31380],{"class":3698},"# make sure that conf file contains all required variables\n",[53,31382,31383,31385,31387,31389,31391,31394,31396,31399,31402,31404,31406,31408,31411,31413,31415,31417,31419,31421,31423,31426,31428,31430,31432,31434,31436,31438,31441,31443,31445],{"class":55,"line":527},[53,31384,4334],{"class":389},[53,31386,4104],{"class":82},[53,31388,31285],{"class":389},[53,31390,6452],{"class":63},[53,31392,31393],{"class":82},"$WEBAPP_PATH",[53,31395,2385],{"class":63},[53,31397,31398],{"class":82}," ] 
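Each check follows the same pattern: test, print an error message, exit 1. If the number of checks keeps growing, they could be extracted into small helper functions; a possible refactoring sketch, not part of the original script:

```bash
# Hypothetical helpers mirroring the inline checks above.
require_set() {                # $1 = variable name, $2 = its value
    if [ -z "$2" ]; then
        echo "Configuration file seems to be incorrect: $1 is missing"
        exit 1
    fi
}
require_dir() {                # $1 = path that must be an existing directory
    if [ ! -d "$1" ]; then
        echo "Directory $1 doesn't exist"
        exit 1
    fi
}

require_set "WEBAPP_PATH" "$WEBAPP_PATH"
require_set "BACKUP_PATH" "$BACKUP_PATH"
require_dir "$WEBAPP_PATH"
```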
",[53,31400,31401],{"class":389},"||",[53,31403,4104],{"class":82},[53,31405,31285],{"class":389},[53,31407,6452],{"class":63},[53,31409,31410],{"class":82},"$BACKUP_PATH",[53,31412,2385],{"class":63},[53,31414,31398],{"class":82},[53,31416,31401],{"class":389},[53,31418,4104],{"class":82},[53,31420,31285],{"class":389},[53,31422,6452],{"class":63},[53,31424,31425],{"class":82},"$DB_NAME",[53,31427,2385],{"class":63},[53,31429,31398],{"class":82},[53,31431,31401],{"class":389},[53,31433,4104],{"class":82},[53,31435,31285],{"class":389},[53,31437,6452],{"class":63},[53,31439,31440],{"class":82},"$MYSQL_CONF",[53,31442,2385],{"class":63},[53,31444,31295],{"class":82},[53,31446,6467],{"class":389},[53,31448,31449,31451,31454,31457,31459,31461,31463],{"class":55,"line":533},[53,31450,31302],{"class":89},[53,31452,31453],{"class":63}," \"Configuration file seems to be incorrect: required variables missing. Please check your config file: $(",[53,31455,31456],{"class":59},"readlink",[53,31458,31332],{"class":89},[53,31460,6452],{"class":63},[53,31462,31290],{"class":82},[53,31464,31465],{"class":63},"\")\"\n",[53,31467,31468,31470],{"class":55,"line":539},[53,31469,31310],{"class":89},[53,31471,4368],{"class":89},[53,31473,31474],{"class":55,"line":545},[53,31475,4373],{"class":389},[53,31477,31478],{"class":55,"line":2414},[53,31479,31480],{"class":3698},"# make sure that WEBAPP_PATH exists\n",[53,31482,31483,31485,31487,31489,31491,31493,31495,31497,31499],{"class":55,"line":2426},[53,31484,4334],{"class":389},[53,31486,4104],{"class":82},[53,31488,11914],{"class":389},[53,31490,4254],{"class":389},[53,31492,6452],{"class":63},[53,31494,31393],{"class":82},[53,31496,2385],{"class":63},[53,31498,31295],{"class":82},[53,31500,6467],{"class":389},[53,31502,31503,31505,31508,31510,31512,31514,31516],{"class":55,"line":2438},[53,31504,31302],{"class":89},[53,31506,31507],{"class":63}," \"The given webapp path doesn't exist. Please check your config file: $(",[53,31509,31456],{"class":59},[53,31511,31332],{"class":89},[53,31513,6452],{"class":63},[53,31515,31290],{"class":82},[53,31517,31465],{"class":63},[53,31519,31520,31522],{"class":55,"line":2451},[53,31521,31310],{"class":89},[53,31523,4368],{"class":89},[53,31525,31526],{"class":55,"line":2459},[53,31527,4373],{"class":389},[53,31529,31530],{"class":55,"line":2470},[53,31531,31532],{"class":3698},"# make sure that mysql config file exists\n",[53,31534,31535,31537,31539,31541,31543,31545,31547,31549,31551],{"class":55,"line":2476},[53,31536,4334],{"class":389},[53,31538,4104],{"class":82},[53,31540,11914],{"class":389},[53,31542,31332],{"class":389},[53,31544,6452],{"class":63},[53,31546,31440],{"class":82},[53,31548,2385],{"class":63},[53,31550,31295],{"class":82},[53,31552,6467],{"class":389},[53,31554,31555,31557,31560,31562,31564,31566,31568],{"class":55,"line":2484},[53,31556,31302],{"class":89},[53,31558,31559],{"class":63}," \"The given mysql config file doesn't exist. 
Please check your config file: $(",[53,31561,31456],{"class":59},[53,31563,31332],{"class":89},[53,31565,6452],{"class":63},[53,31567,31290],{"class":82},[53,31569,31465],{"class":63},[53,31571,31572,31574],{"class":55,"line":2490},[53,31573,31310],{"class":89},[53,31575,4368],{"class":89},[53,31577,31578],{"class":55,"line":2495},[53,31579,4373],{"class":389},[649,31581,31583],{"id":31582},"creating-the-backup-directories","Creating the backup directories",[18,31585,31586],{},"The script checks if the specified parent backup directory already exists – and will create it if it doesn’t. For every\nprocessed backup a time stamped folder (with the pattern yyyy-MM-dd_hh-mm) is created in this directory containing\nthe backup data. Allowed is only one backup per minute. If you try to run the script and there is already a backup\nfolder for the current minute, the script will fail with an error message.",[43,31588,31590],{"className":30754,"code":31589,"language":30756,"meta":48,"style":48},"\nCURRENT_DATE=\"$(date +%Y-%m-%d_%H-%M)\"\nFULL_BACKUP_PATH=\"$BACKUP_PATH/$CURRENT_DATE\"\n# check if backup dir exists, if not create it\nif [ ! -d \"$BACKUP_PATH\" ]; then\n mkdir \"$BACKUP_PATH\"\n echo \"Create parent backup directory $(readlink -f \"$BACKUP_PATH\")\"\nfi\n# check if there is already a dir for current date, if not create it\nif [ ! -d \"$FULL_BACKUP_PATH\" ]; then\n mkdir \"$FULL_BACKUP_PATH\"\n echo \"Create backup directory $(readlink -f \"$FULL_BACKUP_PATH\")\"\nelse\n echo \"$(readlink -f \"$FULL_BACKUP_PATH\") already exists\"\n echo \"Wait until $(readlink -f \"$BACKUP_PATH/$(date -d '+1min' +%Y-%m-%d_%H-%M)\") can be created\"\n exit 1\nfi\n\n",[50,31591,31592,31596,31610,31628,31633,31653,31664,31681,31685,31690,31711,31721,31738,31743,31761,31786,31792],{"__ignoreMap":48},[53,31593,31594],{"class":55,"line":56},[53,31595,500],{"emptyLinePlaceholder":499},[53,31597,31598,31601,31603,31605,31607],{"class":55,"line":86},[53,31599,31600],{"class":82},"CURRENT_DATE",[53,31602,390],{"class":389},[53,31604,31169],{"class":63},[53,31606,4390],{"class":59},[53,31608,31609],{"class":63}," +%Y-%m-%d_%H-%M)\"\n",[53,31611,31612,31615,31617,31619,31621,31623,31626],{"class":55,"line":126},[53,31613,31614],{"class":82},"FULL_BACKUP_PATH",[53,31616,390],{"class":389},[53,31618,2385],{"class":63},[53,31620,31410],{"class":82},[53,31622,4422],{"class":63},[53,31624,31625],{"class":82},"$CURRENT_DATE",[53,31627,31375],{"class":63},[53,31629,31630],{"class":55,"line":163},[53,31631,31632],{"class":3698},"# check if backup dir exists, if not create it\n",[53,31634,31635,31637,31639,31641,31643,31645,31647,31649,31651],{"class":55,"line":186},[53,31636,4334],{"class":389},[53,31638,4104],{"class":82},[53,31640,11914],{"class":389},[53,31642,4254],{"class":389},[53,31644,6452],{"class":63},[53,31646,31410],{"class":82},[53,31648,2385],{"class":63},[53,31650,31295],{"class":82},[53,31652,6467],{"class":389},[53,31654,31655,31658,31660,31662],{"class":55,"line":221},[53,31656,31657],{"class":59}," mkdir",[53,31659,6452],{"class":63},[53,31661,31410],{"class":82},[53,31663,31375],{"class":63},[53,31665,31666,31668,31671,31673,31675,31677,31679],{"class":55,"line":242},[53,31667,31302],{"class":89},[53,31669,31670],{"class":63}," \"Create parent backup directory 
$(",[53,31672,31456],{"class":59},[53,31674,31332],{"class":89},[53,31676,6452],{"class":63},[53,31678,31410],{"class":82},[53,31680,31465],{"class":63},[53,31682,31683],{"class":55,"line":273},[53,31684,4373],{"class":389},[53,31686,31687],{"class":55,"line":279},[53,31688,31689],{"class":3698},"# check if there is already a dir for current date, if not create it\n",[53,31691,31692,31694,31696,31698,31700,31702,31705,31707,31709],{"class":55,"line":496},[53,31693,4334],{"class":389},[53,31695,4104],{"class":82},[53,31697,11914],{"class":389},[53,31699,4254],{"class":389},[53,31701,6452],{"class":63},[53,31703,31704],{"class":82},"$FULL_BACKUP_PATH",[53,31706,2385],{"class":63},[53,31708,31295],{"class":82},[53,31710,6467],{"class":389},[53,31712,31713,31715,31717,31719],{"class":55,"line":503},[53,31714,31657],{"class":59},[53,31716,6452],{"class":63},[53,31718,31704],{"class":82},[53,31720,31375],{"class":63},[53,31722,31723,31725,31728,31730,31732,31734,31736],{"class":55,"line":509},[53,31724,31302],{"class":89},[53,31726,31727],{"class":63}," \"Create backup directory $(",[53,31729,31456],{"class":59},[53,31731,31332],{"class":89},[53,31733,6452],{"class":63},[53,31735,31704],{"class":82},[53,31737,31465],{"class":63},[53,31739,31740],{"class":55,"line":515},[53,31741,31742],{"class":389},"else\n",[53,31744,31745,31747,31750,31752,31754,31756,31758],{"class":55,"line":521},[53,31746,31302],{"class":89},[53,31748,31749],{"class":63}," \"$(",[53,31751,31456],{"class":59},[53,31753,31332],{"class":89},[53,31755,6452],{"class":63},[53,31757,31704],{"class":82},[53,31759,31760],{"class":63},"\") already exists\"\n",[53,31762,31763,31765,31768,31770,31772,31774,31776,31779,31781,31783],{"class":55,"line":527},[53,31764,31302],{"class":89},[53,31766,31767],{"class":63}," \"Wait until $(",[53,31769,31456],{"class":59},[53,31771,31332],{"class":89},[53,31773,6452],{"class":63},[53,31775,31410],{"class":82},[53,31777,31778],{"class":63},"/$(",[53,31780,4390],{"class":59},[53,31782,4254],{"class":89},[53,31784,31785],{"class":63}," '+1min' +%Y-%m-%d_%H-%M)\") can be created\"\n",[53,31787,31788,31790],{"class":55,"line":533},[53,31789,31310],{"class":89},[53,31791,4368],{"class":89},[53,31793,31794],{"class":55,"line":539},[53,31795,4373],{"class":389},[649,31797,31799],{"id":31798},"save-the-information-about-the-current-deployed-version","Save the information about the current deployed version",[18,31801,31802,31803,4101],{},"You find the information about the current deployed version in the ",[50,31804,31805],{},"pom.properties",[43,31807,31809],{"className":13786,"code":31808,"language":13788,"meta":48,"style":48}," ~/tomcat/webapps/ROOT/META-INF/maven/org.synyx/foo/pom.properties\n",[50,31810,31811],{"__ignoreMap":48},[53,31812,31813],{"class":55,"line":56},[53,31814,31808],{},[18,31816,31817],{},"It looks like this:",[43,31819,31821],{"className":13786,"code":31820,"language":13788,"meta":48,"style":48},"\n#Generated by Maven\n#Sat Mar 30 02:03:52 CET 2013\nversion=1.1.10-SNAPSHOT\ngroupId=org.synyx\nartifactId=foo\n\n",[50,31822,31823,31827,31832,31837,31842,31847],{"__ignoreMap":48},[53,31824,31825],{"class":55,"line":56},[53,31826,500],{"emptyLinePlaceholder":499},[53,31828,31829],{"class":55,"line":86},[53,31830,31831],{},"#Generated by Maven\n",[53,31833,31834],{"class":55,"line":126},[53,31835,31836],{},"#Sat Mar 30 02:03:52 CET 
2013\n",[53,31838,31839],{"class":55,"line":163},[53,31840,31841],{},"version=1.1.10-SNAPSHOT\n",[53,31843,31844],{"class":55,"line":186},[53,31845,31846],{},"groupId=org.synyx\n",[53,31848,31849],{"class":55,"line":221},[53,31850,31851],{},"artifactId=foo\n",[18,31853,31854],{},"This .properties file contains all the relevant information we need if we’d like to rollback to the backup state. The\nNexus deploy script takes the group id, artifact id and version as parameters:",[43,31856,31858],{"className":13786,"code":31857,"language":13788,"meta":48,"style":48},"./nexusdeploy.sh -i org.synyx:foo:1.1.10-SNAPSHOT -w ROOT\n",[50,31859,31860],{"__ignoreMap":48},[53,31861,31862],{"class":55,"line":56},[53,31863,31857],{},[18,31865,31866],{},"So this properties file is copied to the backup directory.",[43,31868,31870],{"className":30754,"code":31869,"language":30756,"meta":48,"style":48},"\nPROPS_NAME=\"pom.properties\"\nAPP_PROPS=\"$(find \"$WEBAPP_PATH\" -name \"$PROPS_NAME\")\"\n# more than one file matching the criterias found\nif [ ! $(find \"$WEBAPP_PATH\" -name \"$PROPS_NAME\" | wc -l) -eq 1 ]; then\n echo \"No or more than one $PROPS_NAME was found under the webapp path $(readlink -f \"$WEBAPP_PATH\")\"\n exit 1\nfi\n# copy the information about the current deployed version\ncp \"$APP_PROPS\" \"$FULL_BACKUP_PATH\"\n\n",[50,31871,31872,31876,31886,31914,31919,31966,31988,31994,31998,32003],{"__ignoreMap":48},[53,31873,31874],{"class":55,"line":56},[53,31875,500],{"emptyLinePlaceholder":499},[53,31877,31878,31881,31883],{"class":55,"line":86},[53,31879,31880],{"class":82},"PROPS_NAME",[53,31882,390],{"class":389},[53,31884,31885],{"class":63},"\"pom.properties\"\n",[53,31887,31888,31891,31893,31895,31897,31899,31901,31904,31907,31909,31912],{"class":55,"line":126},[53,31889,31890],{"class":82},"APP_PROPS",[53,31892,390],{"class":389},[53,31894,31169],{"class":63},[53,31896,15077],{"class":59},[53,31898,6452],{"class":63},[53,31900,31393],{"class":82},[53,31902,31903],{"class":63},"\" ",[53,31905,31906],{"class":89},"-name",[53,31908,6452],{"class":63},[53,31910,31911],{"class":82},"$PROPS_NAME",[53,31913,31465],{"class":63},[53,31915,31916],{"class":55,"line":163},[53,31917,31918],{"class":3698},"# more than one file matching the criterias found\n",[53,31920,31921,31923,31925,31927,31930,31932,31934,31936,31938,31941,31943,31945,31947,31949,31952,31955,31957,31960,31962,31964],{"class":55,"line":186},[53,31922,4334],{"class":389},[53,31924,4104],{"class":82},[53,31926,11914],{"class":389},[53,31928,31929],{"class":82}," $(",[53,31931,15077],{"class":59},[53,31933,6452],{"class":63},[53,31935,31393],{"class":82},[53,31937,2385],{"class":63},[53,31939,31940],{"class":89}," -name",[53,31942,6452],{"class":63},[53,31944,31911],{"class":82},[53,31946,2385],{"class":63},[53,31948,6324],{"class":389},[53,31950,31951],{"class":59}," wc",[53,31953,31954],{"class":89}," -l",[53,31956,1665],{"class":82},[53,31958,31959],{"class":389},"-eq",[53,31961,24476],{"class":89},[53,31963,31295],{"class":82},[53,31965,6467],{"class":389},[53,31967,31968,31970,31973,31975,31978,31980,31982,31984,31986],{"class":55,"line":221},[53,31969,31302],{"class":89},[53,31971,31972],{"class":63}," \"No or more than one ",[53,31974,31911],{"class":82},[53,31976,31977],{"class":63}," was found under the webapp path 
$(",[53,31979,31456],{"class":59},[53,31981,31332],{"class":89},[53,31983,6452],{"class":63},[53,31985,31393],{"class":82},[53,31987,31465],{"class":63},[53,31989,31990,31992],{"class":55,"line":242},[53,31991,31310],{"class":89},[53,31993,4368],{"class":89},[53,31995,31996],{"class":55,"line":273},[53,31997,4373],{"class":389},[53,31999,32000],{"class":55,"line":279},[53,32001,32002],{"class":3698},"# copy the information about the current deployed version\n",[53,32004,32005,32008,32010,32013,32015,32017,32019],{"class":55,"line":496},[53,32006,32007],{"class":59},"cp",[53,32009,6452],{"class":63},[53,32011,32012],{"class":82},"$APP_PROPS",[53,32014,2385],{"class":63},[53,32016,6452],{"class":63},[53,32018,31704],{"class":82},[53,32020,31375],{"class":63},[649,32022,32024],{"id":32023},"save-the-war-file-itself-if-configured","Save the war file itself if configured",[18,32026,32027],{},"We remember, in the configuration file there are three possibilities what to do with the war file during the backup\nprocess:",[577,32029,32030,32033,32036],{},[580,32031,32032],{},"always save the war file",[580,32034,32035],{},"never save the war file",[580,32037,32038],{},"save the war file only if the current deployed version is a snapshot",[18,32040,32041,32042,32044,32045,32048],{},"The first two states are self-explanatory: either the war file is copied to the backup directory or not. If the war\nfile should be only saved if it is a snapshot, the ",[50,32043,31805],{}," file has to be read in and the variable ",[50,32046,32047],{},"$version","\nhas to be checked if it contains the string ‘SNAPSHOT’.",[43,32050,32052],{"className":30754,"code":32051,"language":30756,"meta":48,"style":48},"\n# variable isn't empty\nif [ ! -z \"$WAR\" ]; then\nSAVE_WAR=0\n# decide if war should be saved\n# if conf = save war only if current deployed version is a snapshot\nif [ \"$WAR\" -eq 0 ]; then\n# check if current deployed version is a snapshot\n# read in properties\n. 
\"$APP_PROPS\"\n# $version has information about current deployed version\ncase \"$version\" in\n *SNAPSHOT)\n SAVE_WAR=1\n ;;\nesac\n# if conf = save war always\nelif [ \"$WAR\" -eq 1 ]; then\n SAVE_WAR=1\nfi\nif [ \"$SAVE_WAR\" -eq 1 ]; then\n echo \"Backup war file: \"\n cp -v \"$WEBAPP_PATH\"/../*.war \"$FULL_BACKUP_PATH\"\nfi\nfi\n\n",[50,32053,32054,32058,32063,32085,32095,32100,32105,32125,32130,32135,32145,32150,32162,32172,32181,32186,32190,32195,32216,32225,32229,32250,32257,32283,32287],{"__ignoreMap":48},[53,32055,32056],{"class":55,"line":56},[53,32057,500],{"emptyLinePlaceholder":499},[53,32059,32060],{"class":55,"line":86},[53,32061,32062],{"class":3698},"# variable isn't empty\n",[53,32064,32065,32067,32069,32071,32074,32076,32079,32081,32083],{"class":55,"line":126},[53,32066,4334],{"class":389},[53,32068,4104],{"class":82},[53,32070,11914],{"class":389},[53,32072,32073],{"class":389}," -z",[53,32075,6452],{"class":63},[53,32077,32078],{"class":82},"$WAR",[53,32080,2385],{"class":63},[53,32082,31295],{"class":82},[53,32084,6467],{"class":389},[53,32086,32087,32090,32092],{"class":55,"line":163},[53,32088,32089],{"class":82},"SAVE_WAR",[53,32091,390],{"class":389},[53,32093,32094],{"class":63},"0\n",[53,32096,32097],{"class":55,"line":186},[53,32098,32099],{"class":3698},"# decide if war should be saved\n",[53,32101,32102],{"class":55,"line":221},[53,32103,32104],{"class":3698},"# if conf = save war only if current deployed version is a snapshot\n",[53,32106,32107,32109,32111,32113,32115,32117,32119,32121,32123],{"class":55,"line":242},[53,32108,4334],{"class":389},[53,32110,4104],{"class":82},[53,32112,2385],{"class":63},[53,32114,32078],{"class":82},[53,32116,2385],{"class":63},[53,32118,4342],{"class":389},[53,32120,4345],{"class":89},[53,32122,31295],{"class":82},[53,32124,6467],{"class":389},[53,32126,32127],{"class":55,"line":273},[53,32128,32129],{"class":3698},"# check if current deployed version is a snapshot\n",[53,32131,32132],{"class":55,"line":279},[53,32133,32134],{"class":3698},"# read in properties\n",[53,32136,32137,32139,32141,32143],{"class":55,"line":496},[53,32138,986],{"class":89},[53,32140,6452],{"class":63},[53,32142,32012],{"class":82},[53,32144,31375],{"class":63},[53,32146,32147],{"class":55,"line":503},[53,32148,32149],{"class":3698},"# $version has information about current deployed version\n",[53,32151,32152,32154,32156,32158,32160],{"class":55,"line":509},[53,32153,31215],{"class":389},[53,32155,6452],{"class":63},[53,32157,32047],{"class":82},[53,32159,2385],{"class":63},[53,32161,31225],{"class":389},[53,32163,32164,32167,32170],{"class":55,"line":515},[53,32165,32166],{"class":389}," *",[53,32168,32169],{"class":31230},"SNAPSHOT",[53,32171,685],{"class":389},[53,32173,32174,32177,32179],{"class":55,"line":521},[53,32175,32176],{"class":82}," SAVE_WAR",[53,32178,390],{"class":389},[53,32180,31058],{"class":63},[53,32182,32183],{"class":55,"line":527},[53,32184,32185],{"class":82}," ;;\n",[53,32187,32188],{"class":55,"line":533},[53,32189,31250],{"class":389},[53,32191,32192],{"class":55,"line":539},[53,32193,32194],{"class":3698},"# if conf = save war 
always\n",[53,32196,32197,32200,32202,32204,32206,32208,32210,32212,32214],{"class":55,"line":545},[53,32198,32199],{"class":389},"elif",[53,32201,4104],{"class":82},[53,32203,2385],{"class":63},[53,32205,32078],{"class":82},[53,32207,2385],{"class":63},[53,32209,4342],{"class":389},[53,32211,24476],{"class":89},[53,32213,31295],{"class":82},[53,32215,6467],{"class":389},[53,32217,32218,32221,32223],{"class":55,"line":2414},[53,32219,32220],{"class":82}," SAVE_WAR",[53,32222,390],{"class":389},[53,32224,31058],{"class":63},[53,32226,32227],{"class":55,"line":2426},[53,32228,4373],{"class":389},[53,32230,32231,32233,32235,32237,32240,32242,32244,32246,32248],{"class":55,"line":2438},[53,32232,4334],{"class":389},[53,32234,4104],{"class":82},[53,32236,2385],{"class":63},[53,32238,32239],{"class":82},"$SAVE_WAR",[53,32241,2385],{"class":63},[53,32243,4342],{"class":389},[53,32245,24476],{"class":89},[53,32247,31295],{"class":82},[53,32249,6467],{"class":389},[53,32251,32252,32254],{"class":55,"line":2451},[53,32253,31302],{"class":89},[53,32255,32256],{"class":63}," \"Backup war file: \"\n",[53,32258,32259,32262,32264,32266,32268,32271,32274,32277,32279,32281],{"class":55,"line":2459},[53,32260,32261],{"class":59}," cp",[53,32263,8667],{"class":89},[53,32265,6452],{"class":63},[53,32267,31393],{"class":82},[53,32269,32270],{"class":63},"\"/../",[53,32272,32273],{"class":89},"*",[53,32275,32276],{"class":63},".war",[53,32278,6452],{"class":63},[53,32280,31704],{"class":82},[53,32282,31375],{"class":63},[53,32284,32285],{"class":55,"line":2470},[53,32286,4373],{"class":389},[53,32288,32289],{"class":55,"line":2476},[53,32290,4373],{"class":389},[649,32292,32294],{"id":32293},"save-the-database-data","Save the database data",[18,32296,32297,32298,32300],{},"For every element in ",[50,32299,31425],{}," a dump is created.",[18,32302,32303,32304,32306,32307,32309,32310,32313,32314,32317],{},"Due to the existing ",[50,32305,30744],{}," with access data information, ",[50,32308,30736],{}," can be called without the parameters ",[50,32311,32312],{},"user"," and\n",[50,32315,32316],{},"password",". However it needs the following information:",[577,32319,32320,32326,32332],{},[580,32321,32322,32323,8780],{},"the path to the config file with the access data (",[50,32324,32325],{},"--defaults-file",[580,32327,32328,32329,8780],{},"the suffix (database name) which access data section of the config file should be used (",[50,32330,32331],{},"--defaults-group-suffix",[580,32333,32334],{},"the database name to know which database should be dumped",[18,32336,32337,32338,32341,32342,32345,32346,32348],{},"If something goes wrong during database dump (e.g. access is denied), the error is redirected to ",[50,32339,32340],{},"/dev/null"," because the\nscript has its own error handling. It checks if ",[50,32343,32344],{},"$?"," is 0 or not. ",[50,32347,32344],{}," gives the exit status of the last executed\ncommand. A status not 0 is an error code, i.e. 
the last command wasn’t successful.",[43,32350,32352],{"className":30754,"code":32351,"language":30756,"meta":48,"style":48},"\nfor db in $DB_NAME\ndo\n # create dump and hide mysqldump errors due to own error handling\n mysqldump --defaults-file=\"$MYSQL_CONF\" --defaults-group-suffix=\"$db\" \"$db\" > \"$FULL_BACKUP_PATH/$db-$CURRENT_DATE.dump.sql\" 2>/dev/null\n # creating dump was successful, so return value is 0\n if [ \"$?\" -eq 0 ]; then\n echo \"Create dump for database $db: $(readlink -f \"$FULL_BACKUP_PATH/$db-$CURRENT_DATE.dump.sql\")\"\n # something went wrong while trying to create dump (e.g. access denied), so return value is not 0 but any other number\n else\n echo \"Problems encountered while trying to create dump for database $db\"\n fi\ndone\n\n",[50,32353,32354,32358,32372,32376,32381,32435,32440,32460,32491,32496,32501,32512,32517],{"__ignoreMap":48},[53,32355,32356],{"class":55,"line":56},[53,32357,500],{"emptyLinePlaceholder":499},[53,32359,32360,32363,32366,32369],{"class":55,"line":86},[53,32361,32362],{"class":389},"for",[53,32364,32365],{"class":82}," db ",[53,32367,32368],{"class":389},"in",[53,32370,32371],{"class":82}," $DB_NAME\n",[53,32373,32374],{"class":55,"line":126},[53,32375,31210],{"class":389},[53,32377,32378],{"class":55,"line":163},[53,32379,32380],{"class":3698}," # create dump and hide mysqldump errors due to own error handling\n",[53,32382,32383,32386,32389,32391,32393,32395,32398,32400,32403,32405,32407,32409,32411,32414,32416,32418,32420,32422,32424,32426,32429,32432],{"class":55,"line":186},[53,32384,32385],{"class":59}," mysqldump",[53,32387,32388],{"class":89}," --defaults-file=",[53,32390,2385],{"class":63},[53,32392,31440],{"class":82},[53,32394,2385],{"class":63},[53,32396,32397],{"class":89}," --defaults-group-suffix=",[53,32399,2385],{"class":63},[53,32401,32402],{"class":82},"$db",[53,32404,2385],{"class":63},[53,32406,6452],{"class":63},[53,32408,32402],{"class":82},[53,32410,2385],{"class":63},[53,32412,32413],{"class":389}," >",[53,32415,6452],{"class":63},[53,32417,31704],{"class":82},[53,32419,4422],{"class":63},[53,32421,32402],{"class":82},[53,32423,8653],{"class":63},[53,32425,31625],{"class":82},[53,32427,32428],{"class":63},".dump.sql\"",[53,32430,32431],{"class":389}," 2>",[53,32433,32434],{"class":63},"/dev/null\n",[53,32436,32437],{"class":55,"line":221},[53,32438,32439],{"class":3698}," # creating dump was successful, so return value is 0\n",[53,32441,32442,32444,32446,32448,32450,32452,32454,32456,32458],{"class":55,"line":242},[53,32443,6749],{"class":389},[53,32445,4104],{"class":82},[53,32447,2385],{"class":63},[53,32449,32344],{"class":89},[53,32451,2385],{"class":63},[53,32453,4342],{"class":389},[53,32455,4345],{"class":89},[53,32457,31295],{"class":82},[53,32459,6467],{"class":389},[53,32461,32462,32464,32467,32469,32472,32474,32476,32478,32480,32482,32484,32486,32488],{"class":55,"line":273},[53,32463,4357],{"class":89},[53,32465,32466],{"class":63}," \"Create dump for database ",[53,32468,32402],{"class":82},[53,32470,32471],{"class":63},": $(",[53,32473,31456],{"class":59},[53,32475,31332],{"class":89},[53,32477,6452],{"class":63},[53,32479,31704],{"class":82},[53,32481,4422],{"class":63},[53,32483,32402],{"class":82},[53,32485,8653],{"class":63},[53,32487,31625],{"class":82},[53,32489,32490],{"class":63},".dump.sql\")\"\n",[53,32492,32493],{"class":55,"line":279},[53,32494,32495],{"class":3698}," # something went wrong while trying to create dump (e.g. 
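For the --defaults-group-suffix call to work, the option file referenced by MYSQL_CONF needs a group whose name is the usual group name with the database name appended, which is how MySQL resolves the suffix. A sketch for a single database called myapp_db; the file location, user and password are placeholders:

```bash
#!/bin/bash
MYSQL_CONF="/tmp/backup-my.cnf"    # example location, normally something like ~/.my.cnf
db="myapp_db"

# With --defaults-group-suffix=myapp_db mysqldump reads the usual groups
# [client] / [mysqldump] plus the suffixed ones [clientmyapp_db] / [mysqldumpmyapp_db].
cat > "$MYSQL_CONF" <<'EOF'
[clientmyapp_db]
user = backup_user
password = secret
EOF
chmod 600 "$MYSQL_CONF"

# Equivalent to the explicit $? check: use the command itself as the if condition.
if mysqldump --defaults-file="$MYSQL_CONF" --defaults-group-suffix="$db" "$db" > "/tmp/$db.dump.sql" 2>/dev/null; then
    echo "Create dump for database $db"
else
    echo "Problems encountered while trying to create dump for database $db"
fi
```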
access denied), so return value is not 0 but any other number\n",[53,32497,32498],{"class":55,"line":496},[53,32499,32500],{"class":389}," else\n",[53,32502,32503,32505,32508,32510],{"class":55,"line":503},[53,32504,4357],{"class":89},[53,32506,32507],{"class":63}," \"Problems encountered while trying to create dump for database ",[53,32509,32402],{"class":82},[53,32511,31375],{"class":63},[53,32513,32514],{"class":55,"line":509},[53,32515,32516],{"class":389}," fi\n",[53,32518,32519],{"class":55,"line":515},[53,32520,31255],{"class":389},[649,32522,32524],{"id":32523},"save-specified-files-andor-directories","Save specified files and/or directories",[18,32526,32527],{},"To move the specified files and/or directories to the backup directory, following steps are performed:",[577,32529,32530,32537,32544],{},[580,32531,32532,32533,32536],{},"make sure that the variable ",[50,32534,32535],{},"$CONTENT_TO_BE_MOVED"," isn’t empty",[580,32538,32539,32540,32543],{},"execute the ",[50,32541,32542],{},"move"," operation for each element",[580,32545,32546],{},"if an element doesn’t exist, print an error message",[43,32548,32550],{"className":30754,"code":32549,"language":30756,"meta":48,"style":48},"\n# make sure that variable $CONTENT_TO_BE_MOVED isn't empty\nif [ ! -z \"$CONTENT_TO_BE_MOVED\" ]; then\nfor i in $CONTENT_TO_BE_MOVED\ndo\n # if the given content is a dir or a file, process else throw an error\n if [ -d \"$i\" ] || [ -f \"$i\" ]; then\n echo \"Move $(readlink -f \"$i\") into backup dir\"\n mv \"$i\" \"$FULL_BACKUP_PATH\"\n else\n echo \"Can't move $i: doesn't exist\";\n fi\ndone\nfi\n\n",[50,32551,32552,32556,32561,32581,32593,32597,32602,32636,32654,32671,32675,32689,32693,32697],{"__ignoreMap":48},[53,32553,32554],{"class":55,"line":56},[53,32555,500],{"emptyLinePlaceholder":499},[53,32557,32558],{"class":55,"line":86},[53,32559,32560],{"class":3698},"# make sure that variable $CONTENT_TO_BE_MOVED isn't empty\n",[53,32562,32563,32565,32567,32569,32571,32573,32575,32577,32579],{"class":55,"line":126},[53,32564,4334],{"class":389},[53,32566,4104],{"class":82},[53,32568,11914],{"class":389},[53,32570,32073],{"class":389},[53,32572,6452],{"class":63},[53,32574,32535],{"class":82},[53,32576,2385],{"class":63},[53,32578,31295],{"class":82},[53,32580,6467],{"class":389},[53,32582,32583,32585,32588,32590],{"class":55,"line":163},[53,32584,32362],{"class":389},[53,32586,32587],{"class":82}," i ",[53,32589,32368],{"class":389},[53,32591,32592],{"class":82}," $CONTENT_TO_BE_MOVED\n",[53,32594,32595],{"class":55,"line":186},[53,32596,31210],{"class":389},[53,32598,32599],{"class":55,"line":221},[53,32600,32601],{"class":3698}," # if the given content is a dir or a file, process else throw an error\n",[53,32603,32604,32606,32608,32610,32612,32615,32617,32619,32621,32623,32626,32628,32630,32632,32634],{"class":55,"line":242},[53,32605,6749],{"class":389},[53,32607,4104],{"class":82},[53,32609,6226],{"class":389},[53,32611,6452],{"class":63},[53,32613,32614],{"class":82},"$i",[53,32616,2385],{"class":63},[53,32618,31398],{"class":82},[53,32620,31401],{"class":389},[53,32622,4104],{"class":82},[53,32624,32625],{"class":389},"-f",[53,32627,6452],{"class":63},[53,32629,32614],{"class":82},[53,32631,2385],{"class":63},[53,32633,31295],{"class":82},[53,32635,6467],{"class":389},[53,32637,32638,32640,32643,32645,32647,32649,32651],{"class":55,"line":273},[53,32639,4357],{"class":89},[53,32641,32642],{"class":63}," \"Move 
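The loop for the content that is only copied is the mirror image of the move loop: same existence checks, same error message, only cp instead of mv and $CONTENT_TO_BE_COPIED instead of $CONTENT_TO_BE_MOVED. A sketch using the same variables as the script (the -r flag for directories is an assumption):

```bash
# make sure that variable $CONTENT_TO_BE_COPIED isn't empty
if [ ! -z "$CONTENT_TO_BE_COPIED" ]; then
    for i in $CONTENT_TO_BE_COPIED
    do
        # if the given content is a dir or a file, process else throw an error
        if [ -d "$i" ] || [ -f "$i" ]; then
            echo "Copy $(readlink -f "$i") into backup dir"
            cp -r "$i" "$FULL_BACKUP_PATH"
        else
            echo "Can't copy $i: doesn't exist"
        fi
    done
fi
```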
$(",[53,32644,31456],{"class":59},[53,32646,31332],{"class":89},[53,32648,6452],{"class":63},[53,32650,32614],{"class":82},[53,32652,32653],{"class":63},"\") into backup dir\"\n",[53,32655,32656,32659,32661,32663,32665,32667,32669],{"class":55,"line":279},[53,32657,32658],{"class":59}," mv",[53,32660,6452],{"class":63},[53,32662,32614],{"class":82},[53,32664,2385],{"class":63},[53,32666,6452],{"class":63},[53,32668,31704],{"class":82},[53,32670,31375],{"class":63},[53,32672,32673],{"class":55,"line":496},[53,32674,32500],{"class":389},[53,32676,32677,32679,32682,32684,32687],{"class":55,"line":503},[53,32678,4357],{"class":89},[53,32680,32681],{"class":63}," \"Can't move ",[53,32683,32614],{"class":82},[53,32685,32686],{"class":63},": doesn't exist\"",[53,32688,1727],{"class":82},[53,32690,32691],{"class":55,"line":509},[53,32692,32516],{"class":389},[53,32694,32695],{"class":55,"line":515},[53,32696,31255],{"class":389},[53,32698,32699],{"class":55,"line":521},[53,32700,4373],{"class":389},[18,32702,32703,32704,32707,32708,32711],{},"These steps are similar for the files/directories to be copied, only the action (",[50,32705,32706],{},"copy",") and the parameter (\n",[50,32709,32710],{},"$CONTENT_TO_BE_COPIED",") are different.",[649,32713,32715],{"id":32714},"dont-forget-to-make-your-script-executable","Don’t forget to make your script executable 😉",[43,32717,32719],{"className":13786,"code":32718,"language":13788,"meta":48,"style":48},"chmod +x backup.sh\n",[50,32720,32721],{"__ignoreMap":48},[53,32722,32723],{"class":55,"line":56},[53,32724,32718],{},[2207,32726,32728],{"id":32727},"automation","Automation",[18,32730,32731],{},"Now it’s no longer required to do the backup manually. 🙂 And even better: if this script is integrated into our existing\nautomatic deployment script, every time a deployment is performed, a backup will be performed first.",[2207,32733,32735],{"id":32734},"links","Links",[18,32737,32738],{},[585,32739,32742],{"href":30722,"rel":32740,"title":32741},[589],"Github Project","Github Project: automated-backup",[18,32744,32745],{},[585,32746,32749],{"href":30675,"rel":32747,"title":32748},[589],"Link to opening quote","Link to opening quote: Continuous Delivery vs. Continuous Deployment vs. 
Continuous-Integration",[607,32751,32752],{},"html pre.shiki code .sVt8B, html code.shiki .sVt8B{--shiki-default:#24292E;--shiki-dark:#E1E4E8}html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sJ8bj, html code.shiki .sJ8bj{--shiki-default:#6A737D;--shiki-dark:#6A737D}html pre.shiki code .szBVR, html code.shiki .szBVR{--shiki-default:#D73A49;--shiki-dark:#F97583}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}html pre.shiki code .sA_wV, html code.shiki .sA_wV{--shiki-default:#032F62;--shiki-dark:#DBEDFF}",{"title":48,"searchDepth":86,"depth":86,"links":32754},[32755,32756,32757,32758,32759,32769,32770],{"id":30661,"depth":86,"text":30662},{"id":30680,"depth":86,"text":30681},{"id":30726,"depth":86,"text":30727},{"id":30888,"depth":86,"text":30889},{"id":31113,"depth":86,"text":31114,"children":32760},[32761,32762,32763,32764,32765,32766,32767,32768],{"id":31120,"depth":126,"text":31121},{"id":31258,"depth":126,"text":31259},{"id":31582,"depth":126,"text":31583},{"id":31798,"depth":126,"text":31799},{"id":32023,"depth":126,"text":32024},{"id":32293,"depth":126,"text":32294},{"id":32523,"depth":126,"text":32524},{"id":32714,"depth":126,"text":32715},{"id":32727,"depth":86,"text":32728},{"id":32734,"depth":86,"text":32735},[32772,613],"azubi-blog","2013-04-10T12:01:45","https://synyx.de/blog/continuous-deployment-automatic-backup-script/",{},"/blog/continuous-deployment-automatic-backup-script",{"title":30651,"description":48},"blog/continuous-deployment-automatic-backup-script",[32727,4836,32780,32781,30736,32782],"continuous-delivery","continuous-deployment","shell-script","A few words about Continuous Deployment Continuous Deployment is the deployment or release of code to Production as soon as it is ready. 
(…) The automated process is key because…","Hn5KrGdZNvCGjjquVQO6wQSlqBJmAcHF-Ov442gIBu4",{"id":32786,"title":32787,"author":32788,"body":32789,"category":32972,"date":32973,"description":32974,"extension":617,"link":32975,"meta":32976,"navigation":499,"path":32977,"seo":32978,"slug":32980,"stem":32981,"tags":32982,"teaser":32987,"__hash__":32988},"blog/blog/selenium-grid-windows.md","Acceptance testing at synyx – Part 4",[12981],{"type":11,"value":32790,"toc":32964},[32791,32794,32809,32812,32816,32819,32832,32842,32873,32885,32894,32901,32915,32919,32922,32929,32933,32936,32939,32942,32946,32952,32955,32959,32962],[14,32792,32787],{"id":32793},"acceptance-testing-at-synyx-part-4",[18,32795,32796,32797,32802,32803,32808],{},"In the last posts we set up our infrastructure to be able\nto ",[585,32798,32801],{"href":32799,"rel":32800},"http://blog.synyx.de/2013/01/remote-browsers/",[589],"aquire Browsers that run on a remote host"," and we created\na ",[585,32804,32807],{"href":32805,"rel":32806},"http://blog.synyx.de/2013/02/setup-selenium-grid/",[589],"selenium Grid infrastructure"," that is scalable and reboot safe.",[18,32810,32811],{},"Within this post I want to describe how to extend the grid with windows nodes to be able to test on our beloved Internet\nExplorer.",[2207,32813,32815],{"id":32814},"adding-windows-nodes-to-the-grid","Adding windows nodes to the grid",[18,32817,32818],{},"To be able to run tests on Windows browsers I wanted to go a similar way as on the linux-machines: As soon as the\nmachine boots the selenium-server should get started and registers itself to the selenium-hub (set up like mentioned\nbefore).",[18,32820,32821,32822,32825,32826,32831],{},"First I installed a current version of Java JRE and created a folder ",[50,32823,32824],{},"C:\\selenium"," where I put all the needed files to (\nnode-config, selenium-server\njar, ",[585,32827,32830],{"href":32828,"rel":32829},"http://code.google.com/p/selenium/wiki/InternetExplorerDriver",[589],"InternetExplorerDriver"," and so on.",[18,32833,32834,32835,32838,32839,4101],{},"Then I created a startup-script ",[50,32836,32837],{},"selenium-server.bat"," which basically executes the selenium-server process the way the\nlinux-startscripts do and put it to ",[50,32840,32841],{},"C:\\selenium\\selenium-server.bat",[43,32843,32847],{"className":32844,"code":32845,"language":32846,"meta":48,"style":48},"language-cmd shiki shiki-themes github-light github-dark","\n\"C:\\Program Files (x86)\\Java\\jre7\"\\bin\\java\n -Dwebdriver.ie.driver=C:\\selenium\\IEDriverServer.exe\n -jar C:\\selenium\\selenium-server-standalone-2.28.0.jar\n -role node -nodeConfig C:\\selenium\\nodeconfig.json\n\n","cmd",[50,32848,32849,32853,32858,32863,32868],{"__ignoreMap":48},[53,32850,32851],{"class":55,"line":56},[53,32852,500],{"emptyLinePlaceholder":499},[53,32854,32855],{"class":55,"line":86},[53,32856,32857],{},"\"C:\\Program Files (x86)\\Java\\jre7\"\\bin\\java\n",[53,32859,32860],{"class":55,"line":126},[53,32861,32862],{}," -Dwebdriver.ie.driver=C:\\selenium\\IEDriverServer.exe\n",[53,32864,32865],{"class":55,"line":163},[53,32866,32867],{}," -jar C:\\selenium\\selenium-server-standalone-2.28.0.jar\n",[53,32869,32870],{"class":55,"line":186},[53,32871,32872],{}," -role node -nodeConfig C:\\selenium\\nodeconfig.json\n",[18,32874,10847,32875,32878,32879,32884],{},[50,32876,32877],{},"nodeconfig.json"," looks similar to the linux configurations except that the capabilities-section tells the hub\nthere are Internet 
Explorers.",[585,32880,32883],{"href":32881,"rel":32882},"https://media.synyx.de/uploads//2013/02/nodeconfig.json_.txt",[589],"Click here"," to see it.\nCurrently we dont add Chromes and Firefoxes to the hub but this would be possible.",[18,32886,32887,32888,32893],{},"In order to run the script as a service I used ",[585,32889,32892],{"href":32890,"rel":32891},"http://nssm.cc/",[589],"Non-Sucking Service Manager"," which registers to\nWindows’ services and reduces the amount of clicking that has to be done. It also takes care of restarting the service\nif it hangs and so on.",[18,32895,32896,32897,32900],{},"To get this done in Win7 you have to open an “Adminstrator Commandline” by clicking Start, enter cmd into the search\nfield and execute with ",[50,32898,32899],{},"CTRL+SHIFT+Return",". Then nssm can be used to install the service:",[43,32902,32904],{"className":32844,"code":32903,"language":32846,"meta":48,"style":48},"\nC:\\selenium\\nssm.exe install selenium-server C:\\selenium\\selenium-server.bat\n\n",[50,32905,32906,32910],{"__ignoreMap":48},[53,32907,32908],{"class":55,"line":56},[53,32909,500],{"emptyLinePlaceholder":499},[53,32911,32912],{"class":55,"line":86},[53,32913,32914],{},"C:\\selenium\\nssm.exe install selenium-server C:\\selenium\\selenium-server.bat\n",[649,32916,32918],{"id":32917},"service-accessing-the-desktop","Service accessing the desktop",[18,32920,32921],{},"Because selenium-server needs to access the desktop you have to allow this by clicking your way though: Navigate\nthrough Controlpanel -> Services -> selenium-server -> Properties.",[18,32923,32924,32925,32928],{},"There go to the “Log On” tab and check ",[50,32926,32927],{},"[x] allow service to interact with desktop",". Start the service or reboot the\nmachine and you’re done. As soon as the service is started it should show up at the console of your selenium-hub node.",[649,32930,32932],{"id":32931},"problem-1-uploading-files","Problem 1: Uploading files",[18,32934,32935],{},"Unfortunately we experienced a problem with the described solution: As soon as one of our selenium tests included a file\nupload a strange popup showed up in Internet Exporer that prevented the tests from succeeding.",[18,32937,32938],{},"Actually selenium does really nice work when it comes to uploading files: If a path to a file is entered into an\ninput-field WebDriver detects this and (in case of RemoteWebDriver) uploads it to a temporary file on the\nselenium-node. Then it “chooses” this file on the file-chooser in the browser. But somehow the corresponding dialog in\nInternet Explorer causes problems when the process is not running as a “real” user. This is probably because the\nservice-user has no “home-directory” where the file-upload dialog could be initialized to. And this causes an\nerror-popup which makes the test fail.",[18,32940,32941],{},"We fixed this by taking another approach: The service can be configured not to run as the service-user but as a given\nwindows user. As soon as we used this option the popup was gone and the tests that used file uploads went back to green.",[649,32943,32945],{"id":32944},"problem-2-performance","Problem 2: Performance",[18,32947,32948,32949,32951],{},"Now our Windows7 node has some strange performance issues which are probably related to the “accessing the desktop”\nfeature. The tests run significantly slower (like factor 5 to 10 times) when the service is run as non-service user as\ndescribed in the solution for the upload problem. 
This also is the case when the service runs with\n",[50,32950,32927],{}," AND noone is logged in to the VM by rdesktop.",[18,32953,32954],{},"Currently we are investigating this issue and trying to work on a solution. A workaround to this could be keeping the\ncorresponding display session open somehow (e.g. by enabling VNC). As soon as we found a solution for this problem I’ll\npost an update.",[2207,32956,32958],{"id":32957},"coming-up-next","Coming up next…",[18,32960,32961],{},"This post completes (at least for now ;-)) all the technical stuff. Next we will look at what an acceptance test is,\nhow we define acceptance criteria and ultimately how we write and report these tests.",[607,32963,989],{},{"title":48,"searchDepth":86,"depth":86,"links":32965},[32966,32971],{"id":32814,"depth":86,"text":32815,"children":32967},[32968,32969,32970],{"id":32917,"depth":126,"text":32918},{"id":32931,"depth":126,"text":32932},{"id":32944,"depth":126,"text":32945},{"id":32957,"depth":86,"text":32958},[613],"2013-02-27T08:48:44","In the last posts we set up our infrastructure to be able\\nto aquire Browsers that run on a remote host and we created\\na selenium Grid infrastructure that is scalable and reboot safe.","https://synyx.de/blog/selenium-grid-windows/",{},"/blog/selenium-grid-windows",{"title":32787,"description":32979},"In the last posts we set up our infrastructure to be able\nto aquire Browsers that run on a remote host and we created\na selenium Grid infrastructure that is scalable and reboot safe.","selenium-grid-windows","blog/selenium-grid-windows",[32983,32984,32985,32986],"grid","rdekstop","selenium","windows","In the last posts we set up our infrastructure to be able to aquire Browsers that run on a remote host and we created a selenium Grid infrastructure that is…","D1h3MEKcBwrW8ZdU1R_Cqbe-Afqnj3OK58tyadQVVz0",{"id":32990,"title":32991,"author":32992,"body":32994,"category":34219,"date":34220,"description":34221,"extension":617,"link":34222,"meta":34223,"navigation":499,"path":34224,"seo":34225,"slug":32998,"stem":34227,"tags":34228,"teaser":34231,"__hash__":34232},"blog/blog/monitoring-nih-style-part-2.md","Monitoring – NIH style (part 2)",[32993],"jbuch",{"type":11,"value":32995,"toc":34213},[32996,32999,33009,33012,33016,33019,33035,33050,33054,33064,33067,33072,33075,33083,33086,33094,33097,33100,33335,33338,33347,33428,33431,33434,33536,33539,33542,33587,33601,33604,34073,34083,34092,34102,34105,34111,34114,34117,34120,34139,34151,34158,34161,34172,34178,34181,34190,34193,34207,34210],[14,32997,32991],{"id":32998},"monitoring-nih-style-part-2",[18,33000,33001,33002,33008],{},"This expands on the idea in\nthe ",[585,33003,33007],{"href":33004,"rel":33005,"title":33006},"http://blog.synyx.de/2013/02/monitoring-nih-style",[589],"Part 1","first part of this blog series",". We will still be\nworking NIH style here – this time to improve the visuals, user-interface and information density.",[18,33010,33011],{},"The idea still is:collect arbitrary information, stay small, display distilled information. 
The goal is to learn more\nhow to visualize things, and of course do it within the constraints mentioned in the previous blog entry.",[2207,33013,33015],{"id":33014},"prototype-2","Prototype #2",[18,33017,33018],{},"General idea:",[577,33020,33021,33029,33032],{},[580,33022,33023,33024,8780],{},"Web interface (",[585,33025,33028],{"href":33026,"rel":33027,"title":33028},"https://web.archive.org/web/20130301163513/http://square.github.com:80/cubism/",[589],"Cubism",[580,33030,33031],{},"real database",[580,33033,33034],{},"real programming language",[18,33036,33037,33038,33043,33044,33049],{},"Cubism (also as recently introduced into ",[585,33039,33042],{"href":33040,"rel":33041,"title":33042},"http://www.jolokia.org/",[589],"Jolokia",") is a wonderful Javascript library\nusing ",[585,33045,33048],{"href":33046,"rel":33047,"title":33048},"http://d3js.org/",[589],"D3"," to plot information in a browser window as horizon graphs. It features automatic\nupdates to the graphs – for which we will write a server-side application. As with before, environmental restrictions\napply (I would have loved to use Jolokia, but it is not available everywhere), so we will roll our own.",[649,33051,33053],{"id":33052},"client","Client",[18,33055,33056,33057,33063],{},"While the documentation to cubism isn’t really bad, it lacks a little on the “how do I start to use it” side. The best\nsource for information I found to be the HTML source of the main cubism site. For further information\nthe ",[585,33058,33062],{"href":33059,"rel":33060,"title":33061},"https://github.com/square/cubism/wiki/API-Reference",[589],"Cubism API Reference","API documentation"," on the Cubism wiki\nis immensely useful.",[18,33065,33066],{},"Parts of a Cubism page:",[577,33068,33069],{},[580,33070,33071],{},"A metric definition",[18,33073,33074],{},"This is a Javascript function which feeds a cubism callback with an array of values. 
It gets a start and stop-date and\na stepping (time between updates/granularity of information).",[577,33076,33077,33080],{},[580,33078,33079],{},"An HTML element on which cubism can append the graphs to.",[580,33081,33082],{},"A cubism context, which sets up cubism-specifics.",[18,33084,33085],{},"Here the graphs are set up, most importantly how much information will be displayed.",[577,33087,33088,33091],{},[580,33089,33090],{},"The metric instances themselves",[580,33092,33093],{},"A set of d3 commands to put the graphs inside the specified HTML element.",[18,33095,33096],{},"For brevity I will try to limit all code here to the really necessary parts while being verbose enough that crucial\ninformation does not get lost.",[18,33098,33099],{},"HTML parts:",[43,33101,33103],{"className":7103,"code":33102,"language":7105,"meta":48,"style":48},"\u003Cstyle>\n @import url(//square.github.com/cubism/style.css);\n #mymetric {\n min-height: 155px;\n }\n\u003C/style>\n\u003Cdiv id=\"body\">\n \u003Ch2>Metric:\u003C/h2>\n \u003Cdiv id=\"mymetric\">\u003C/div>\n\u003C/div>\n\u003Cscript type=\"text/javascript\" src=\"http://d3js.org/d3.v2.js\">\u003C/script>\n\u003Cscript\n type=\"text/javascript\"\n src=\"http://raw.github.com/square/cubism/master/cubism.v1.js\"\n>\u003C/script>\n\u003Cscript\n type=\"text/javascript\"\n src=\"http://code.jquery.com/jquery-1.8.2.min.js\"\n>\u003C/script>\n\u003Cscript type=\"text/javascript\" src=\"mymetrics.js\">\u003C/script>\n",[50,33104,33105,33113,33128,33135,33150,33154,33162,33177,33190,33209,33217,33244,33251,33261,33271,33279,33285,33293,33302,33310],{"__ignoreMap":48},[53,33106,33107,33109,33111],{"class":55,"line":56},[53,33108,7112],{"class":82},[53,33110,607],{"class":7115},[53,33112,7134],{"class":82},[53,33114,33115,33118,33121,33123,33126],{"class":55,"line":86},[53,33116,33117],{"class":389}," @import",[53,33119,33120],{"class":89}," url",[53,33122,1067],{"class":82},[53,33124,33125],{"class":6186},"//square.github.com/cubism/style.css",[53,33127,1079],{"class":82},[53,33129,33130,33133],{"class":55,"line":126},[53,33131,33132],{"class":59}," #mymetric",[53,33134,6176],{"class":82},[53,33136,33137,33140,33142,33145,33148],{"class":55,"line":163},[53,33138,33139],{"class":89}," 
min-height",[53,33141,2246],{"class":82},[53,33143,33144],{"class":89},"155",[53,33146,33147],{"class":389},"px",[53,33149,1727],{"class":82},[53,33151,33152],{"class":55,"line":186},[53,33153,7384],{"class":82},[53,33155,33156,33158,33160],{"class":55,"line":221},[53,33157,7232],{"class":82},[53,33159,607],{"class":7115},[53,33161,7134],{"class":82},[53,33163,33164,33166,33168,33170,33172,33175],{"class":55,"line":242},[53,33165,7112],{"class":82},[53,33167,7091],{"class":7115},[53,33169,7249],{"class":59},[53,33171,390],{"class":82},[53,33173,33174],{"class":63},"\"body\"",[53,33176,7134],{"class":82},[53,33178,33179,33181,33183,33186,33188],{"class":55,"line":273},[53,33180,7139],{"class":82},[53,33182,2207],{"class":7115},[53,33184,33185],{"class":82},">Metric:\u003C/",[53,33187,2207],{"class":7115},[53,33189,7134],{"class":82},[53,33191,33192,33194,33196,33198,33200,33203,33205,33207],{"class":55,"line":279},[53,33193,7139],{"class":82},[53,33195,7091],{"class":7115},[53,33197,7249],{"class":59},[53,33199,390],{"class":82},[53,33201,33202],{"class":63},"\"mymetric\"",[53,33204,7256],{"class":82},[53,33206,7091],{"class":7115},[53,33208,7134],{"class":82},[53,33210,33211,33213,33215],{"class":55,"line":496},[53,33212,7232],{"class":82},[53,33214,7091],{"class":7115},[53,33216,7134],{"class":82},[53,33218,33219,33221,33223,33225,33227,33230,33233,33235,33238,33240,33242],{"class":55,"line":503},[53,33220,7112],{"class":82},[53,33222,6883],{"class":7115},[53,33224,7145],{"class":59},[53,33226,390],{"class":82},[53,33228,33229],{"class":63},"\"text/javascript\"",[53,33231,33232],{"class":59}," src",[53,33234,390],{"class":82},[53,33236,33237],{"class":63},"\"http://d3js.org/d3.v2.js\"",[53,33239,7256],{"class":82},[53,33241,6883],{"class":7115},[53,33243,7134],{"class":82},[53,33245,33246,33248],{"class":55,"line":509},[53,33247,7112],{"class":82},[53,33249,33250],{"class":7115},"script\n",[53,33252,33253,33256,33258],{"class":55,"line":515},[53,33254,33255],{"class":59}," type",[53,33257,390],{"class":82},[53,33259,33260],{"class":63},"\"text/javascript\"\n",[53,33262,33263,33266,33268],{"class":55,"line":521},[53,33264,33265],{"class":59}," src",[53,33267,390],{"class":82},[53,33269,33270],{"class":63},"\"http://raw.github.com/square/cubism/master/cubism.v1.js\"\n",[53,33272,33273,33275,33277],{"class":55,"line":527},[53,33274,7256],{"class":82},[53,33276,6883],{"class":7115},[53,33278,7134],{"class":82},[53,33280,33281,33283],{"class":55,"line":533},[53,33282,7112],{"class":82},[53,33284,33250],{"class":7115},[53,33286,33287,33289,33291],{"class":55,"line":539},[53,33288,33255],{"class":59},[53,33290,390],{"class":82},[53,33292,33260],{"class":63},[53,33294,33295,33297,33299],{"class":55,"line":545},[53,33296,33265],{"class":59},[53,33298,390],{"class":82},[53,33300,33301],{"class":63},"\"http://code.jquery.com/jquery-1.8.2.min.js\"\n",[53,33303,33304,33306,33308],{"class":55,"line":2414},[53,33305,7256],{"class":82},[53,33307,6883],{"class":7115},[53,33309,7134],{"class":82},[53,33311,33312,33314,33316,33318,33320,33322,33324,33326,33329,33331,33333],{"class":55,"line":2426},[53,33313,7112],{"class":82},[53,33315,6883],{"class":7115},[53,33317,7145],{"class":59},[53,33319,390],{"class":82},[53,33321,33229],{"class":63},[53,33323,33232],{"class":59},[53,33325,390],{"class":82},[53,33327,33328],{"class":63},"\"mymetrics.js\"",[53,33330,7256],{"class":82},[53,33332,6883],{"class":7115},[53,33334,7134],{"class":82},[18,33336,33337],{},"This will set up your basic structure and can be 
prettified as much as you care to.",[18,33339,33340,33341,4101],{},"Next up is configuring the\ncubism ",[585,33342,33346],{"href":33343,"rel":33344,"title":33345},"https://github.com/square/cubism/wiki/Cubism#wiki-context",[589],"Cubism Context API","context",[43,33348,33350],{"className":14754,"code":33349,"language":14756,"meta":48,"style":48},"var context = cubism\n .context()\n .serverDelay(500)\n .clientDelay(100)\n .step(10e3)\n .size(960);\n",[50,33351,33352,33364,33372,33386,33400,33414],{"__ignoreMap":48},[53,33353,33354,33356,33359,33361],{"class":55,"line":56},[53,33355,24067],{"class":389},[53,33357,33358],{"class":82}," context ",[53,33360,390],{"class":389},[53,33362,33363],{"class":82}," cubism\n",[53,33365,33366,33368,33370],{"class":55,"line":86},[53,33367,15241],{"class":82},[53,33369,33346],{"class":59},[53,33371,15405],{"class":82},[53,33373,33374,33376,33379,33381,33384],{"class":55,"line":126},[53,33375,15241],{"class":82},[53,33377,33378],{"class":59},"serverDelay",[53,33380,1067],{"class":82},[53,33382,33383],{"class":89},"500",[53,33385,685],{"class":82},[53,33387,33388,33390,33393,33395,33398],{"class":55,"line":163},[53,33389,15241],{"class":82},[53,33391,33392],{"class":59},"clientDelay",[53,33394,1067],{"class":82},[53,33396,33397],{"class":89},"100",[53,33399,685],{"class":82},[53,33401,33402,33404,33407,33409,33412],{"class":55,"line":186},[53,33403,15241],{"class":82},[53,33405,33406],{"class":59},"step",[53,33408,1067],{"class":82},[53,33410,33411],{"class":89},"10e3",[53,33413,685],{"class":82},[53,33415,33416,33418,33421,33423,33426],{"class":55,"line":221},[53,33417,15241],{"class":82},[53,33419,33420],{"class":59},"size",[53,33422,1067],{"class":82},[53,33424,33425],{"class":89},"960",[53,33427,1079],{"class":82},[18,33429,33430],{},"This specifies that the server and client do not react instantly, that we only update the graph once each 10 seconds and\nthat at most we will display 960 data-points.",[18,33432,33433],{},"I will leave on how to generate the metrics as the last point, as that is the part where I had the most problems with.\nBut here is how we will generate some metric to be used:",[43,33435,33437],{"className":14754,"code":33436,"language":14756,"meta":48,"style":48},"var site = \"TST\";\nvar metricGenerator = metricGeneratorForHost(\"localhost:8080\");\nvar mReqTime = metricGenerator.metric(site, \"maxRequestTime\").divide(1000);\nmReqTime.toString = function () {\n return site + \" maxReqT\";\n};\n",[50,33438,33439,33453,33472,33505,33519,33532],{"__ignoreMap":48},[53,33440,33441,33443,33446,33448,33451],{"class":55,"line":56},[53,33442,24067],{"class":389},[53,33444,33445],{"class":82}," site ",[53,33447,390],{"class":389},[53,33449,33450],{"class":63}," \"TST\"",[53,33452,1727],{"class":82},[53,33454,33455,33457,33460,33462,33465,33467,33470],{"class":55,"line":86},[53,33456,24067],{"class":389},[53,33458,33459],{"class":82}," metricGenerator ",[53,33461,390],{"class":389},[53,33463,33464],{"class":59}," metricGeneratorForHost",[53,33466,1067],{"class":82},[53,33468,33469],{"class":63},"\"localhost:8080\"",[53,33471,1079],{"class":82},[53,33473,33474,33476,33479,33481,33484,33487,33490,33493,33495,33498,33500,33503],{"class":55,"line":126},[53,33475,24067],{"class":389},[53,33477,33478],{"class":82}," mReqTime ",[53,33480,390],{"class":389},[53,33482,33483],{"class":82}," metricGenerator.",[53,33485,33486],{"class":59},"metric",[53,33488,33489],{"class":82},"(site, 
",[53,33491,33492],{"class":63},"\"maxRequestTime\"",[53,33494,5881],{"class":82},[53,33496,33497],{"class":59},"divide",[53,33499,1067],{"class":82},[53,33501,33502],{"class":89},"1000",[53,33504,1079],{"class":82},[53,33506,33507,33510,33513,33515,33517],{"class":55,"line":163},[53,33508,33509],{"class":82},"mReqTime.",[53,33511,33512],{"class":59},"toString",[53,33514,1245],{"class":389},[53,33516,14995],{"class":389},[53,33518,14775],{"class":82},[53,33520,33521,33523,33525,33527,33530],{"class":55,"line":186},[53,33522,11818],{"class":389},[53,33524,33445],{"class":82},[53,33526,11314],{"class":389},[53,33528,33529],{"class":63}," \" maxReqT\"",[53,33531,1727],{"class":82},[53,33533,33534],{"class":55,"line":221},[53,33535,15067],{"class":82},[18,33537,33538],{},"The metricGenerator returns a metric which returns the maximal request time of a JBoss request in milliseconds. We\nspecify a filter which devides each datapoint by 1000 so we get more readable numbers and define a toString function so\nthe description takes less space in the graph.",[18,33540,33541],{},"Now we bind the metrics to a HTML element:",[43,33543,33545],{"className":7103,"code":33544,"language":7105,"meta":48,"style":48},"d3.select(\"#mymetric\").call(function(div) { div.append(\"div\") .attr(\"class\",\n\"axis\") .call(context.axis().orient(\"top\")); div.selectAll(\".horizon\")\n.data([mReqTime]) .enter().append(\"div\") .attr(\"class\", \"horizon\")\n.call(context.horizon()); div.append(\"div\") .attr(\"class\", \"rule\")\n.call(context.rule()); }); // On mousemove, reposition the chart values to match\nthe rule. context.on(\"focus\", function(i) {\nd3.selectAll(\".value\").style(\"right\", i == null ? null : context.size() - i +\n\"px\"); });\n",[50,33546,33547,33552,33557,33562,33567,33572,33577,33582],{"__ignoreMap":48},[53,33548,33549],{"class":55,"line":56},[53,33550,33551],{"class":82},"d3.select(\"#mymetric\").call(function(div) { div.append(\"div\") .attr(\"class\",\n",[53,33553,33554],{"class":55,"line":86},[53,33555,33556],{"class":82},"\"axis\") .call(context.axis().orient(\"top\")); div.selectAll(\".horizon\")\n",[53,33558,33559],{"class":55,"line":126},[53,33560,33561],{"class":82},".data([mReqTime]) .enter().append(\"div\") .attr(\"class\", \"horizon\")\n",[53,33563,33564],{"class":55,"line":163},[53,33565,33566],{"class":82},".call(context.horizon()); div.append(\"div\") .attr(\"class\", \"rule\")\n",[53,33568,33569],{"class":55,"line":186},[53,33570,33571],{"class":82},".call(context.rule()); }); // On mousemove, reposition the chart values to match\n",[53,33573,33574],{"class":55,"line":221},[53,33575,33576],{"class":82},"the rule. context.on(\"focus\", function(i) {\n",[53,33578,33579],{"class":55,"line":242},[53,33580,33581],{"class":82},"d3.selectAll(\".value\").style(\"right\", i == null ? null : context.size() - i +\n",[53,33583,33584],{"class":55,"line":273},[53,33585,33586],{"class":82},"\"px\"); });\n",[18,33588,33589,33590,33596,33597,33600],{},"This produces a time axis on top and the horizon charts. Additionally we catch a mouse-over which will show the\nconcrete values for each horizon chart below the cursor. 
The horizon chart of course has more\noptions, ",[585,33591,33595],{"href":33592,"rel":33593,"title":33594},"https://github.com/square/cubism/wiki/Horizon#wiki-colors",[589],"Cubism Horizon API","color"," only being one of them.\nThe d3 ",[50,33598,33599],{},"data()"," function takes an array of one or more metric definitions.",[18,33602,33603],{},"Now for the metricGenerator, it is not bug free but it works for me TM.",[43,33605,33607],{"className":14754,"code":33606,"language":14756,"meta":48,"style":48},"var metricGeneratorForHost = function (host) {\n if (!arguments.length) host = \"localhost:8080\";\n var source = {};\n var cubism_cubeFormatDate = d3.time.format.iso;\n source.metric = function (site, expression) {\n return context.metric(\n function (start, stop, step, callback) {\n var url =\n host +\n \"/1.0/metric\" +\n \"?site=\" +\n encodeURIComponent(site) +\n \"&expression=\" +\n encodeURIComponent(expression) +\n \"&start=\" +\n cubism_cubeFormatDate(start) +\n \"&stop=\" +\n cubism_cubeFormatDate(stop) +\n \"&step=\" +\n step;\n d3.json(url, function (data) {\n if (!data) return callback(new Error(\"unable to load data\"));\n data.forEach(function (d) {\n cubism_cubeFormatDate.parse(d.date);\n d.value = parseInt(d.value);\n });\n callback(\n null,\n data.map(function (d) {\n return d.value;\n }),\n );\n });\n },\n \"\" + site + \" \" + expression,\n );\n };\n // Returns the Cube host.\n source.toString = function () {\n return \"\" + site + \" \" + expression;\n };\n return source;\n};\n",[50,33608,33609,33626,33652,33664,33676,33699,33710,33734,33745,33752,33760,33767,33777,33784,33793,33800,33810,33817,33826,33833,33838,33857,33888,33906,33917,33930,33935,33942,33949,33966,33974,33979,33984,33989,33994,34013,34017,34021,34026,34038,34058,34062,34069],{"__ignoreMap":48},[53,33610,33611,33613,33615,33617,33619,33621,33624],{"class":55,"line":56},[53,33612,24067],{"class":389},[53,33614,33464],{"class":59},[53,33616,1245],{"class":389},[53,33618,14995],{"class":389},[53,33620,7314],{"class":82},[53,33622,33623],{"class":6186},"host",[53,33625,11325],{"class":82},[53,33627,33628,33630,33632,33634,33637,33639,33642,33645,33647,33650],{"class":55,"line":86},[53,33629,6749],{"class":389},[53,33631,7314],{"class":82},[53,33633,11914],{"class":389},[53,33635,33636],{"class":89},"arguments",[53,33638,986],{"class":82},[53,33640,33641],{"class":89},"length",[53,33643,33644],{"class":82},") host ",[53,33646,390],{"class":389},[53,33648,33649],{"class":63}," \"localhost:8080\"",[53,33651,1727],{"class":82},[53,33653,33654,33656,33659,33661],{"class":55,"line":126},[53,33655,14780],{"class":389},[53,33657,33658],{"class":82}," source ",[53,33660,390],{"class":389},[53,33662,33663],{"class":82}," {};\n",[53,33665,33666,33668,33671,33673],{"class":55,"line":163},[53,33667,14780],{"class":389},[53,33669,33670],{"class":82}," cubism_cubeFormatDate ",[53,33672,390],{"class":389},[53,33674,33675],{"class":82}," d3.time.format.iso;\n",[53,33677,33678,33681,33683,33685,33687,33689,33692,33694,33697],{"class":55,"line":186},[53,33679,33680],{"class":82}," source.",[53,33682,33486],{"class":59},[53,33684,1245],{"class":389},[53,33686,14995],{"class":389},[53,33688,7314],{"class":82},[53,33690,33691],{"class":6186},"site",[53,33693,99],{"class":82},[53,33695,33696],{"class":6186},"expression",[53,33698,11325],{"class":82},[53,33700,33701,33703,33706,33708],{"class":55,"line":221},[53,33702,11007],{"class":389},[53,33704,33705],{"class":82}," 
context.",[53,33707,33486],{"class":59},[53,33709,1139],{"class":82},[53,33711,33712,33715,33717,33719,33721,33724,33726,33728,33730,33732],{"class":55,"line":242},[53,33713,33714],{"class":389}," function",[53,33716,7314],{"class":82},[53,33718,8023],{"class":6186},[53,33720,99],{"class":82},[53,33722,33723],{"class":6186},"stop",[53,33725,99],{"class":82},[53,33727,33406],{"class":6186},[53,33729,99],{"class":82},[53,33731,23760],{"class":6186},[53,33733,11325],{"class":82},[53,33735,33736,33739,33742],{"class":55,"line":273},[53,33737,33738],{"class":389}," var",[53,33740,33741],{"class":82}," url ",[53,33743,33744],{"class":389},"=\n",[53,33746,33747,33750],{"class":55,"line":279},[53,33748,33749],{"class":82}," host ",[53,33751,11359],{"class":389},[53,33753,33754,33757],{"class":55,"line":496},[53,33755,33756],{"class":63}," \"/1.0/metric\"",[53,33758,33759],{"class":389}," +\n",[53,33761,33762,33765],{"class":55,"line":503},[53,33763,33764],{"class":63}," \"?site=\"",[53,33766,33759],{"class":389},[53,33768,33769,33772,33775],{"class":55,"line":509},[53,33770,33771],{"class":59}," encodeURIComponent",[53,33773,33774],{"class":82},"(site) ",[53,33776,11359],{"class":389},[53,33778,33779,33782],{"class":55,"line":515},[53,33780,33781],{"class":63}," \"&expression=\"",[53,33783,33759],{"class":389},[53,33785,33786,33788,33791],{"class":55,"line":521},[53,33787,33771],{"class":59},[53,33789,33790],{"class":82},"(expression) ",[53,33792,11359],{"class":389},[53,33794,33795,33798],{"class":55,"line":527},[53,33796,33797],{"class":63}," \"&start=\"",[53,33799,33759],{"class":389},[53,33801,33802,33805,33808],{"class":55,"line":533},[53,33803,33804],{"class":59}," cubism_cubeFormatDate",[53,33806,33807],{"class":82},"(start) ",[53,33809,11359],{"class":389},[53,33811,33812,33815],{"class":55,"line":539},[53,33813,33814],{"class":63}," \"&stop=\"",[53,33816,33759],{"class":389},[53,33818,33819,33821,33824],{"class":55,"line":545},[53,33820,33804],{"class":59},[53,33822,33823],{"class":82},"(stop) ",[53,33825,11359],{"class":389},[53,33827,33828,33831],{"class":55,"line":2414},[53,33829,33830],{"class":63}," \"&step=\"",[53,33832,33759],{"class":389},[53,33834,33835],{"class":55,"line":2426},[53,33836,33837],{"class":82}," step;\n",[53,33839,33840,33843,33845,33848,33850,33852,33855],{"class":55,"line":2438},[53,33841,33842],{"class":82}," d3.",[53,33844,75],{"class":59},[53,33846,33847],{"class":82},"(url, ",[53,33849,6170],{"class":389},[53,33851,7314],{"class":82},[53,33853,33854],{"class":6186},"data",[53,33856,11325],{"class":82},[53,33858,33859,33862,33864,33866,33869,33871,33873,33875,33877,33880,33882,33885],{"class":55,"line":2451},[53,33860,33861],{"class":389}," if",[53,33863,7314],{"class":82},[53,33865,11914],{"class":389},[53,33867,33868],{"class":82},"data) ",[53,33870,15173],{"class":389},[53,33872,14795],{"class":59},[53,33874,1067],{"class":82},[53,33876,23831],{"class":389},[53,33878,33879],{"class":59}," Error",[53,33881,1067],{"class":82},[53,33883,33884],{"class":63},"\"unable to load data\"",[53,33886,33887],{"class":82},"));\n",[53,33889,33890,33893,33895,33897,33899,33901,33904],{"class":55,"line":2459},[53,33891,33892],{"class":82}," data.",[53,33894,15083],{"class":59},[53,33896,1067],{"class":82},[53,33898,6170],{"class":389},[53,33900,7314],{"class":82},[53,33902,33903],{"class":6186},"d",[53,33905,11325],{"class":82},[53,33907,33908,33911,33914],{"class":55,"line":2470},[53,33909,33910],{"class":82}," 
cubism_cubeFormatDate.",[53,33912,33913],{"class":59},"parse",[53,33915,33916],{"class":82},"(d.date);\n",[53,33918,33919,33922,33924,33927],{"class":55,"line":2476},[53,33920,33921],{"class":82}," d.value ",[53,33923,390],{"class":389},[53,33925,33926],{"class":59}," parseInt",[53,33928,33929],{"class":82},"(d.value);\n",[53,33931,33932],{"class":55,"line":2484},[53,33933,33934],{"class":82}," });\n",[53,33936,33937,33940],{"class":55,"line":2490},[53,33938,33939],{"class":59}," callback",[53,33941,1139],{"class":82},[53,33943,33944,33947],{"class":55,"line":2495},[53,33945,33946],{"class":89}," null",[53,33948,2252],{"class":82},[53,33950,33951,33954,33956,33958,33960,33962,33964],{"class":55,"line":2507},[53,33952,33953],{"class":82}," data.",[53,33955,12665],{"class":59},[53,33957,1067],{"class":82},[53,33959,6170],{"class":389},[53,33961,7314],{"class":82},[53,33963,33903],{"class":6186},[53,33965,11325],{"class":82},[53,33967,33968,33971],{"class":55,"line":2528},[53,33969,33970],{"class":389}," return",[53,33972,33973],{"class":82}," d.value;\n",[53,33975,33976],{"class":55,"line":2539},[53,33977,33978],{"class":82}," }),\n",[53,33980,33981],{"class":55,"line":2551},[53,33982,33983],{"class":82}," );\n",[53,33985,33986],{"class":55,"line":2562},[53,33987,33988],{"class":82}," });\n",[53,33990,33991],{"class":55,"line":2573},[53,33992,33993],{"class":82}," },\n",[53,33995,33996,33999,34001,34003,34005,34008,34010],{"class":55,"line":2585},[53,33997,33998],{"class":63}," \"\"",[53,34000,23602],{"class":389},[53,34002,33445],{"class":82},[53,34004,11314],{"class":389},[53,34006,34007],{"class":63}," \" \"",[53,34009,23602],{"class":389},[53,34011,34012],{"class":82}," expression,\n",[53,34014,34015],{"class":55,"line":2593},[53,34016,11214],{"class":82},[53,34018,34019],{"class":55,"line":2600},[53,34020,11122],{"class":82},[53,34022,34023],{"class":55,"line":2605},[53,34024,34025],{"class":3698}," // Returns the Cube host.\n",[53,34027,34028,34030,34032,34034,34036],{"class":55,"line":2610},[53,34029,33680],{"class":82},[53,34031,33512],{"class":59},[53,34033,1245],{"class":389},[53,34035,14995],{"class":389},[53,34037,14775],{"class":82},[53,34039,34040,34042,34045,34047,34049,34051,34053,34055],{"class":55,"line":2622},[53,34041,11007],{"class":389},[53,34043,34044],{"class":63}," \"\"",[53,34046,23602],{"class":389},[53,34048,33445],{"class":82},[53,34050,11314],{"class":389},[53,34052,34007],{"class":63},[53,34054,23602],{"class":389},[53,34056,34057],{"class":82}," expression;\n",[53,34059,34060],{"class":55,"line":2638},[53,34061,11122],{"class":82},[53,34063,34064,34066],{"class":55,"line":2649},[53,34065,11818],{"class":389},[53,34067,34068],{"class":82}," source;\n",[53,34070,34071],{"class":55,"line":2660},[53,34072,15067],{"class":82},[18,34074,34075,34076,17307,34079,34082],{},"This sets up the basic prototype of a metric which does background HTTP requests to pull data. We will deliver JSON from\nthe server, which gets parsed d3. Cubism does not care at all in which form it gets data, all it cares about are the\nfinal data-points in the callback function. 
The array with the data-points has to be the size of:\n",[50,34077,34078],{},"stop-start/stepping",[50,34080,34081],{},"d3.json(url, callback)"," gets the JSON, parses it and hands it over to the callback.",[43,34084,34086],{"className":13786,"code":34085,"language":13788,"meta":48,"style":48},"[{ site: \"TST\", date: \"2006-01-02T15:04:05.000Z\", value: \"12021\" }]\n",[50,34087,34088],{"__ignoreMap":48},[53,34089,34090],{"class":55,"line":56},[53,34091,34085],{},[18,34093,34094,34095,34098,34099,34101],{},"It takes each element of the array, converts the value to an int and finally (quite convoluted) hands an array of\nintegers to the ",[50,34096,34097],{},"callback(null, [12021])",". If it somehow fails to contact the server correctly it will call the\n",[50,34100,23760],{}," with an error.",[18,34103,34104],{},"This finishes up the client side of things. We just need a web-service which delivers the above JSON.",[18,34106,34107,34108],{},"Example Request:\n",[50,34109,34110],{},"http://localhost:8080/1.0/metric?site=TST&expression=maxRequestTime&start=2006-01-02T15:04:05.000Z&stop=2006-01-02T15:05:25.000Z&step=10000",[18,34112,34113],{},"A note on the behavior how Cubism will execute the metric callback. On site load, it will attempt to fill all\ndata-points. So it will generate a massive request for all missing information. Once it is loaded, I have observed (at\nleast for a stepping of 10 seconds) that it will ask for the last 70 seconds, not only for the last value.",[18,34115,34116],{},"The interpolation of the data is not done client side here, although it could be done here. Additionally there are a few\nbugs where I just didn’t have the time to investigate more. For example the last 7 data-points are always the same, and\nwhen asking for the full data-set it gets the time wrong (by the amount of daylight saving time). Not all too critical\nfor me so I didn’t investigate further.",[649,34118,34119],{"id":19428},"Server",[18,34121,34122,34123,34128,34129,7314,34134,5881],{},"The server part is split into the web-server part and the information gathering part. A sqlite3 database is used to\nexchange data. 
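Before looking at the actual implementation (which, as described next, is written in Go and backed by sqlite3), a hypothetical Node.js sketch of nothing but the HTTP contract may help: one JSON object per step between start and stop, in the shape shown above. The path and parameter names simply mirror the example request, and the values are random placeholders – none of this is the real cubismsource code:

```javascript
// Hypothetical sketch of the /1.0/metric contract only -- NOT the real service,
// which is written in Go and reads its values from sqlite3.
var http = require("http");
var url = require("url");

http.createServer(function (req, res) {
  var parsed = url.parse(req.url, true);
  if (parsed.pathname !== "/1.0/metric") {
    res.writeHead(404);
    return res.end();
  }

  var start = new Date(parsed.query.start); // ISO dates, as in the example request
  var stop = new Date(parsed.query.stop);
  var step = parseInt(parsed.query.step, 10);

  var points = [];
  for (var t = +start; t < +stop; t += step) {
    points.push({
      site: parsed.query.site,
      date: new Date(t).toISOString(),
      value: String(Math.round(Math.random() * 20000)), // placeholder instead of real JBoss data
    });
  }

  res.writeHead(200, { "Content-Type": "application/json" });
  res.end(JSON.stringify(points));
}).listen(8080);
```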
As we are adventurous, we will use the new fancy ",[585,34124,34127],{"href":34125,"rel":34126,"title":34127},"http://golang.org/",[589],"Go"," language\nfrom ",[585,34130,34133],{"href":34131,"rel":34132,"title":34133},"http://en.wikipedia.org/wiki/Rob_Pike",[589],"Rob Pike",[585,34135,25672],{"href":34136,"rel":34137,"title":34138},"http://google.com/",[589],"Google",[18,34140,34141,34142,4101,34146],{},"To avoid all too much code show, I have pushed a (cleaned) version of my project\nto ",[585,34143,10890],{"href":34144,"rel":34145,"title":10890},"https://github.com/",[589],[585,34147,34148],{"href":34148,"rel":34149,"title":34150},"https://github.com/BuJo/cubismsource",[589],"cubismsource",[18,34152,34153,34154,34157],{},"There you can follow my ",[14675,34155,34156],{},"misadventures"," development path and learning process – my proficiency in Go is sadly lacking.\nBut, I have put up a (hopefully) decent README to get an interesting party started.",[18,34159,34160],{},"The code is split into",[577,34162,34163,34166,34169],{},[580,34164,34165],{},"cubismsource: web-application which delivers the HTML site and the metrics from the database to cubism",[580,34167,34168],{},"jboss2sqlite: application to periodically save the jboss status into the database",[580,34170,34171],{},"jbossinfo: small library for handling the jboss status xml",[18,34173,34174],{},[2223,34175],{"alt":34176,"src":34177},"\"Screenshot from 2013-02-01 14:52:06\"","https://media.synyx.de/uploads//2013/02/Screenshot-from-2013-02-01-145206.png",[18,34179,34180],{},"This is a partial screenshot of three running JBoss instances running almost the same application. The observant reader\nwill of course spot oddities (like the maximum request time of the first instance climbing quite high there, like the\nfirst instance having a hiccup at the end).",[18,34182,34183,34184,34189],{},"The result is quite close to something\nlike ",[585,34185,34188],{"href":34186,"rel":34187,"title":34188},"https://web.archive.org/web/20130325101848/http://square.github.com:80/cube/",[589],"Cube"," or Jolokia – just a\nlittle less clever! 
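Binding several instances into one graph works the same way: build one metric per host and hand the whole array to `.data()`. The host names below are made up, `metricGeneratorForHost` is the helper shown earlier, and the axis and rule are omitted (see the binding snippet above):

```javascript
// Hypothetical host names -- one metric per JBoss instance, all in one container.
var hosts = ["jboss-a:8080", "jboss-b:8080", "jboss-c:8080"];
var metrics = hosts.map(function (h) {
  return metricGeneratorForHost(h).metric("TST", "maxRequestTime").divide(1000);
});

d3.select("#mymetric").selectAll(".horizon")
    .data(metrics)
    .enter().append("div")
    .attr("class", "horizon")
    .call(context.horizon());
```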
Feel free to use the code in any way you want – it is “finished” in the sense that it is feature\ncomplete and unlikely to be extended by me.",[18,34191,34192],{},"So, what did we gain by putting the odd minute here and there into this?",[577,34194,34195,34198,34201,34204],{},[580,34196,34197],{},"The need of a third monitor",[580,34199,34200],{},"Beautiful forms and colors – the co-workers’ envy",[580,34202,34203],{},"Knowledge of visualizing things via Cubism",[580,34205,34206],{},"But, we loose points for the added complexity (~650 LoC) – which makes it less accessible from outside.",[18,34208,34209],{},"As with my previous blog articles – this is less about a “solution”, more a way to understand and learn a little more.",[607,34211,34212],{},"html pre.shiki code .sVt8B, html code.shiki .sVt8B{--shiki-default:#24292E;--shiki-dark:#E1E4E8}html pre.shiki code .s9eBZ, html code.shiki .s9eBZ{--shiki-default:#22863A;--shiki-dark:#85E89D}html pre.shiki code .szBVR, html code.shiki .szBVR{--shiki-default:#D73A49;--shiki-dark:#F97583}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}html pre.shiki code .s4XuR, html code.shiki .s4XuR{--shiki-default:#E36209;--shiki-dark:#FFAB70}html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sJ8bj, html code.shiki .sJ8bj{--shiki-default:#6A737D;--shiki-dark:#6A737D}",{"title":48,"searchDepth":86,"depth":86,"links":34214},[34215],{"id":33014,"depth":86,"text":33015,"children":34216},[34217,34218],{"id":33052,"depth":126,"text":33053},{"id":19428,"depth":126,"text":34119},[6869,613],"2013-02-19T17:36:29","This expands on the idea in\\nthe first part of this blog series. We will still be\\nworking NIH style here – this time to improve the visuals, user-interface and information density.","https://synyx.de/blog/monitoring-nih-style-part-2/",{},"/blog/monitoring-nih-style-part-2",{"title":32991,"description":34226},"This expands on the idea in\nthe first part of this blog series. We will still be\nworking NIH style here – this time to improve the visuals, user-interface and information density.","blog/monitoring-nih-style-part-2",[34229,34230,6882,27671],"cubism","devop","This expands on the idea in the first part of this blog series. 
We will still be working NIH style here – this time to improve the visuals, user-interface and…","QdBT57Gka6GBNRe4Jparu1tOJeI9d1UPNP1j-oqjFvc",{"id":34234,"title":34235,"author":34236,"body":34237,"category":34751,"date":34752,"description":34753,"extension":617,"link":34754,"meta":34755,"navigation":499,"path":34756,"seo":34757,"slug":34241,"stem":34758,"tags":34759,"teaser":34761,"__hash__":34762},"blog/blog/monitoring-nih-style.md","Monitoring – NIH style",[32993],{"type":11,"value":34238,"toc":34748},[34239,34242,34245,34248,34287,34290,34293,34307,34310,34313,34316,34324,34327,34329,34337,34348,34403,34406,34436,34439,34533,34542,34664,34667,34673,34682,34685,34701,34704,34707,34710,34713,34716,34719,34742,34745],[14,34240,34235],{"id":34241},"monitoring-nih-style",[18,34243,34244],{},"Not being a pure Developer but more of a DevOp brings all sorts of interesting problems. When you not only sell software\nbut are also included in the whole life-cycle you get a different view on things.",[18,34246,34247],{},"So there you have it – the production deployment of some software running at the customers site exhibits problems and\nwill be restarted by the support. This rarely leaves time to really analyze what went wrong, except a postmortem\nanalysis. This blog post is about ideas on how to be a little pro-active in gathering information before and after\nthings go horribly wrong.",[18,34249,34250,34251,34256,34257,34262,34263,34268,34269,34273,34274,34280,34281,34286],{},"There are many ways to monitor a system.",[585,34252,34255],{"href":34253,"rel":34254,"title":34255},"http://www.nagios.org",[589],"Nagios"," allows for monitoring the general\nhealth of a system. Something like ",[585,34258,34261],{"href":34259,"rel":34260,"title":34261},"http://ganglia.sourceforge.net",[589],"Ganglia"," can give an overview over\nperformance values. ",[585,34264,34267],{"href":34265,"rel":34266,"title":34267},"http://graylog2.org/",[589],"Graylog2"," can be used to monitor logs. In the Java\nworld ",[585,34270,33042],{"href":34271,"rel":34272,"title":33042},"http://www.jolokia.org",[589]," provides a good view to JMX and thus to the JVM. Specialized Software\nlike the ",[585,34275,34279],{"href":34276,"rel":34277,"title":34278},"http://www.jboss.org/rhq",[589],"RHQ","JBoss RHQ"," allows monitoring of the middleware and applications. Running\napplications on virtual servers provides additional management tools to monitor running virtual instances. 
A\nconfiguration management software like ",[585,34282,34285],{"href":34283,"rel":34284,"title":34285},"http://puppetlabs.com/",[589],"Puppet"," might provide additional information.\nAll this to detect irregularities in the system or to gain insight into a specific set of events.",[18,34288,34289],{},"So there is a huge variety in what to monitor, how to go about it, how notifications work, how much information can be\ngained.",[18,34291,34292],{},"So we have all those nice tools at our disposal – so – what is wrong?",[577,34294,34295,34298,34301,34304],{},[580,34296,34297],{},"No direct control over production systems.",[580,34299,34300],{},"Irregular failures.",[580,34302,34303],{},"Postmortem analysis hard/impossible due to missing information.",[580,34305,34306],{},"There is never enough time TM.",[18,34308,34309],{},"So, lets work with what we have – and do it with the least amount of work and hassle possible for everyone involved.",[2207,34311,34312],{"id":23796},"Prototype",[18,34314,34315],{},"The problem in the production deployment:",[577,34317,34318,34321],{},[580,34319,34320],{},"A memory leak in a Java application deployed in a JBoss 4 at (seemingly) random times at different sites.",[580,34322,34323],{},"No clue in the available information (logfiles) indicates a direct problem",[18,34325,34326],{},"We need more information. Optimally, it would be nice to see if we can detect the problem before the customer does and\nwarn the support staff of the production site. Failing that, we at least have more information to work on during\nanalysis.",[18,34328,33018],{},[577,34330,34331,34334],{},[580,34332,34333],{},"Get memory status from running application small intervals and save for later.",[580,34335,34336],{},"Plot the information.",[18,34338,34339,34340,34343,34344,34347],{},"With a base JBoss 4 we can get access to JVM information via ",[50,34341,34342],{},"/status?XML=true"," (in a few variants) and in\n",[50,34345,34346],{},"/web-console/ServerInfo.jsp",". Gathering the information for later is straight forward, write a 3 line shell script\nrunning in the background saving the output of JBoss to disk.",[43,34349,34351],{"className":45,"code":34350,"language":47,"meta":48,"style":48},"while true; do\ncurl -n $url -o \"html_src.$host/$(date --rfc-3339=seconds).html\"\nsleep $polltime\ndone\n",[50,34352,34353,34363,34391,34399],{"__ignoreMap":48},[53,34354,34355,34357,34359,34361],{"class":55,"line":56},[53,34356,31195],{"class":389},[53,34358,6374],{"class":89},[53,34360,31207],{"class":82},[53,34362,31210],{"class":389},[53,34364,34365,34367,34369,34372,34375,34378,34381,34383,34385,34388],{"class":55,"line":86},[53,34366,4231],{"class":59},[53,34368,4453],{"class":89},[53,34370,34371],{"class":82}," $url ",[53,34373,34374],{"class":89},"-o",[53,34376,34377],{"class":63}," \"html_src.",[53,34379,34380],{"class":82},"$host",[53,34382,31778],{"class":63},[53,34384,4390],{"class":59},[53,34386,34387],{"class":89}," --rfc-3339=seconds",[53,34389,34390],{"class":63},").html\"\n",[53,34392,34393,34396],{"class":55,"line":126},[53,34394,34395],{"class":59},"sleep",[53,34397,34398],{"class":82}," $polltime\n",[53,34400,34401],{"class":55,"line":163},[53,34402,31255],{"class":389},[18,34404,34405],{},"We pretend some time has passed and the problem on the production machine occurred again. We need to aggregate the\ncollected information. We iterate over a set of files and just grab the information we need to save it nicely structured\nin a CSV file for post-processing. 
We need a format our plotting application can read easily.",[43,34407,34409],{"className":13786,"code":34408,"language":13788,"meta":48,"style":48},"date,Free_Memory,Max_Memory,Total_Memory\n\"2013-01-10 00:00:09+01:00\",499292808,2067988480,714670080\n\"2013-01-10 00:00:19+01:00\",485999352,2067988480,714670080\n\"2013-01-10 00:00:29+01:00\",579714128,2067988480,714670080\n\"2013-01-10 00:00:39+01:00\",565887928,2067988480,714670080\n",[50,34410,34411,34416,34421,34426,34431],{"__ignoreMap":48},[53,34412,34413],{"class":55,"line":56},[53,34414,34415],{},"date,Free_Memory,Max_Memory,Total_Memory\n",[53,34417,34418],{"class":55,"line":86},[53,34419,34420],{},"\"2013-01-10 00:00:09+01:00\",499292808,2067988480,714670080\n",[53,34422,34423],{"class":55,"line":126},[53,34424,34425],{},"\"2013-01-10 00:00:19+01:00\",485999352,2067988480,714670080\n",[53,34427,34428],{"class":55,"line":163},[53,34429,34430],{},"\"2013-01-10 00:00:29+01:00\",579714128,2067988480,714670080\n",[53,34432,34433],{"class":55,"line":186},[53,34434,34435],{},"\"2013-01-10 00:00:39+01:00\",565887928,2067988480,714670080\n",[18,34437,34438],{},"And the conversion script from XML to CSV:",[43,34440,34442],{"className":45,"code":34441,"language":47,"meta":48,"style":48},"for filename in html_src.$host/*; do\nstat=\"$(xmllint --format $filename | sed -n\n's/.*memory.*free=\"\\([0-9]*\\)\".*total=\"\\([0-9]*\\)\".*max=\"\\([0-9]*\\)\".*/\\1,\\2,\\3/p')\"\necho \"\\\"$(basename -- $file .html)\\\",$stat\"\ndone\n",[50,34443,34444,34465,34490,34498,34529],{"__ignoreMap":48},[53,34445,34446,34448,34451,34453,34456,34458,34461,34463],{"class":55,"line":56},[53,34447,32362],{"class":389},[53,34449,34450],{"class":82}," filename ",[53,34452,32368],{"class":389},[53,34454,34455],{"class":63}," html_src.",[53,34457,34380],{"class":82},[53,34459,34460],{"class":63},"/*",[53,34462,31207],{"class":82},[53,34464,31210],{"class":389},[53,34466,34467,34470,34472,34474,34477,34480,34483,34485,34487],{"class":55,"line":86},[53,34468,34469],{"class":82},"stat",[53,34471,390],{"class":389},[53,34473,31169],{"class":63},[53,34475,34476],{"class":59},"xmllint",[53,34478,34479],{"class":89}," --format",[53,34481,34482],{"class":82}," $filename",[53,34484,6324],{"class":389},[53,34486,6327],{"class":59},[53,34488,34489],{"class":89}," -n\n",[53,34491,34492,34495],{"class":55,"line":126},[53,34493,34494],{"class":59},"'s/.*memory.*free=\"\\([0-9]*\\)\".*total=\"\\([0-9]*\\)\".*max=\"\\([0-9]*\\)\".*/\\1,\\2,\\3/p'",[53,34496,34497],{"class":63},")\"\n",[53,34499,34500,34502,34504,34506,34508,34511,34514,34517,34520,34522,34524,34527],{"class":55,"line":163},[53,34501,4450],{"class":89},[53,34503,6452],{"class":63},[53,34505,6232],{"class":89},[53,34507,4387],{"class":63},[53,34509,34510],{"class":59},"basename",[53,34512,34513],{"class":89}," --",[53,34515,34516],{"class":82}," $file",[53,34518,34519],{"class":63}," .html)",[53,34521,6232],{"class":89},[53,34523,1073],{"class":63},[53,34525,34526],{"class":82},"$stat",[53,34528,31375],{"class":63},[53,34530,34531],{"class":55,"line":186},[53,34532,31255],{"class":389},[18,34534,34535,34536,34541],{},"Feeding that into e.g. OpenOffice by hand isn’t hard but a little more hand-holding would be nice for convenience. Lets\ncreate a PDF document and let our document viewer handle the reload when the file changes. 
There are choices when it\ncomes to generating graphs – my personal favorite is ",[585,34537,34540],{"href":34538,"rel":34539,"title":34540},"http://www.r-project.org/",[589],"R",", a tool for statistical\ncomputing.",[43,34543,34547],{"className":34544,"code":34545,"language":34546,"meta":48,"style":48},"language-plain shiki shiki-themes github-light github-dark","!/usr/bin/R\nrequire(forecast)\noptions(digits.secs=6)\nargs \u003C- commandArgs(trailingOnly = TRUE)\nr2 \u003C- read.csv(file=args[1],head=TRUE,sep=\",\")\nr2$date = as.POSIXct(r2$date)\nr2$Free_Memory = as.numeric(r2$Free_Memory)\nr2$Max_Memory = as.numeric(r2$Max_Memory)\nr2$Total_Memory = as.numeric(r2$Total_Memory)\nr2$Free_Memory_avg \u003C- ma(r2$Free_Memory, 12)\nr2$Max_Memory_avg \u003C- ma(r2$Max_Memory, 12)\nr2$Total_Memory_avg \u003C- ma(r2$Total_Memory, 12)\nr2$used \u003C- r2$Total_Memory - r2$Free_Memory\nr2$used_avg \u003C- ma(r2$used, 3)\nmaxmem \u003C- (max(r2$Max_Memory))\npdf(\"ram.pdf\")\nplot(range(r2$date), range(r2$Max_Memory), type=\"n\",main=paste(args[2],\n\"JVM Used Mem\\n(\",args[3],\")\"),ylab=\"Memory\n(MB)\",xlab=\"Time\",ylim=c(50,maxmem))\nlines(r2$date, r2$Total_Memory_avg, type=\"l\",col=\"blue\")\nlines(r2$date, r2$Max_Memory_avg, type=\"l\",col=\"red\")\nlines(r2$date, r2$used_avg, type=\"l\",col=\"black\")\ndev.off()\n","plain",[50,34548,34549,34554,34559,34564,34569,34574,34579,34584,34589,34594,34599,34604,34609,34614,34619,34624,34629,34634,34639,34644,34649,34654,34659],{"__ignoreMap":48},[53,34550,34551],{"class":55,"line":56},[53,34552,34553],{},"!/usr/bin/R\n",[53,34555,34556],{"class":55,"line":86},[53,34557,34558],{},"require(forecast)\n",[53,34560,34561],{"class":55,"line":126},[53,34562,34563],{},"options(digits.secs=6)\n",[53,34565,34566],{"class":55,"line":163},[53,34567,34568],{},"args \u003C- commandArgs(trailingOnly = TRUE)\n",[53,34570,34571],{"class":55,"line":186},[53,34572,34573],{},"r2 \u003C- read.csv(file=args[1],head=TRUE,sep=\",\")\n",[53,34575,34576],{"class":55,"line":221},[53,34577,34578],{},"r2$date = as.POSIXct(r2$date)\n",[53,34580,34581],{"class":55,"line":242},[53,34582,34583],{},"r2$Free_Memory = as.numeric(r2$Free_Memory)\n",[53,34585,34586],{"class":55,"line":273},[53,34587,34588],{},"r2$Max_Memory = as.numeric(r2$Max_Memory)\n",[53,34590,34591],{"class":55,"line":279},[53,34592,34593],{},"r2$Total_Memory = as.numeric(r2$Total_Memory)\n",[53,34595,34596],{"class":55,"line":496},[53,34597,34598],{},"r2$Free_Memory_avg \u003C- ma(r2$Free_Memory, 12)\n",[53,34600,34601],{"class":55,"line":503},[53,34602,34603],{},"r2$Max_Memory_avg \u003C- ma(r2$Max_Memory, 12)\n",[53,34605,34606],{"class":55,"line":509},[53,34607,34608],{},"r2$Total_Memory_avg \u003C- ma(r2$Total_Memory, 12)\n",[53,34610,34611],{"class":55,"line":515},[53,34612,34613],{},"r2$used \u003C- r2$Total_Memory - r2$Free_Memory\n",[53,34615,34616],{"class":55,"line":521},[53,34617,34618],{},"r2$used_avg \u003C- ma(r2$used, 3)\n",[53,34620,34621],{"class":55,"line":527},[53,34622,34623],{},"maxmem \u003C- (max(r2$Max_Memory))\n",[53,34625,34626],{"class":55,"line":533},[53,34627,34628],{},"pdf(\"ram.pdf\")\n",[53,34630,34631],{"class":55,"line":539},[53,34632,34633],{},"plot(range(r2$date), range(r2$Max_Memory), type=\"n\",main=paste(args[2],\n",[53,34635,34636],{"class":55,"line":545},[53,34637,34638],{},"\"JVM Used 
Mem\\n(\",args[3],\")\"),ylab=\"Memory\n",[53,34640,34641],{"class":55,"line":2414},[53,34642,34643],{},"(MB)\",xlab=\"Time\",ylim=c(50,maxmem))\n",[53,34645,34646],{"class":55,"line":2426},[53,34647,34648],{},"lines(r2$date, r2$Total_Memory_avg, type=\"l\",col=\"blue\")\n",[53,34650,34651],{"class":55,"line":2438},[53,34652,34653],{},"lines(r2$date, r2$Max_Memory_avg, type=\"l\",col=\"red\")\n",[53,34655,34656],{"class":55,"line":2451},[53,34657,34658],{},"lines(r2$date, r2$used_avg, type=\"l\",col=\"black\")\n",[53,34660,34661],{"class":55,"line":2459},[53,34662,34663],{},"dev.off()\n",[18,34665,34666],{},"This does nothing more than read the CSV file, some data type conversions, figuring out some mins and maxes, calculating\na moving average so we get a smoother view and plot a few lines showing us the memory behavior.",[18,34668,34669],{},[2223,34670],{"alt":34671,"src":34672},"\"ram\"","https://media.synyx.de/uploads//2013/02/ram.png",[18,34674,34675,34676,34681],{},"With a endless loop calling R with the above script… we now have replicated what other tools would already have done for\nus, collect information, plot it in real time. Again. As with other\ntools (",[585,34677,34680],{"href":34678,"rel":34679,"title":34680},"http://graphite.wikidot.com/",[589],"Graphite"," comes to mind) we can plot arbitrary time ranges, zooming in\nweird behavior, as shown above, merely by generating the CSV file differently. This is all very low-tech. We did not\nwrite any application code, we merely glued existing technology together.",[18,34683,34684],{},"So now we have more information:",[577,34686,34687],{},[580,34688,34689,34690],{},"We actually have a few different kinds of bad memory behavior:\n",[577,34691,34692,34695,34698],{},[580,34693,34694],{},"A case where over the course of 2-3 hours the memory fills up.",[580,34696,34697],{},"A spike where the memory instantly goes to max.",[580,34699,34700],{},"A continuous memory leak.",[18,34702,34703],{},"Having timings on when things actually start to go wrong is – quite plainly – awesome. Having Nagios alert on resource\nusage above a specified water mark can be quite valuable (in the first and third case mentioned). But does not as such\nhelp with the second one (except to tell you that the application is dead-ish).",[18,34705,34706],{},"The happy Ganglia/RHQ user will ask:But.. why again?",[18,34708,34709],{},"The programmer with a mind for patterns, elegance and performance will point to the mindless hundreds of megabytes\nfilling up my hard-drive space while this is running.(Not to speak of the lack of scheduling which leads to gaps in\ndata-collection when one JBoss stops responding in a timely manner.)",[18,34711,34712],{},"But that is exactly what I want. I not only want to know “uh oh.. I think your application is dead” (Nagios), not only\n“OK, this is how your memory looked the past 5 hours” (Ganglia) – I want enough context for later analysis.",[18,34714,34715],{},"In this case, the JBoss status page not only contains pure JVM information but also the HTTP requests, how long they are\nalready running and the (GET) request parameters. 
So I can also answer the question “how did the JBoss look like before\nit went all wonky” and have more data available to analyze later – such as hanging user-requests.",[18,34717,34718],{},"So what did we do here.",[577,34720,34721,34724,34727,34730,34733,34736,34739],{},[580,34722,34723],{},"Easily (for the creator) adaptable tool-chain for monitoring and data gathering",[580,34725,34726],{},"We used an obscure set of tools (Bash, curl, R, xmllint, filesystem as DB, pdf viewer with something like inotify).",[580,34728,34729],{},"There is “yet another tool”",[580,34731,34732],{},"The tool needs regular maintenance (truncating old data)",[580,34734,34735],{},"The tool is very “specific” – it is too small to even attempt to be “generic”",[580,34737,34738],{},"But, the same mechanisms can be used to monitor something completely different",[580,34740,34741],{},"And: does not only gather numeric data",[18,34743,34744],{},"This of course should not supplant your existing monitoring infrastructure, all insight gained should be properly\nintegrated. This is more a way of thinking, of developing an idea – if you have more, other, opposing ones, please do\nshare!",[607,34746,34747],{},"html pre.shiki code .szBVR, html code.shiki .szBVR{--shiki-default:#D73A49;--shiki-dark:#F97583}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}html pre.shiki code .sVt8B, html code.shiki .sVt8B{--shiki-default:#24292E;--shiki-dark:#E1E4E8}html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}",{"title":48,"searchDepth":86,"depth":86,"links":34749},[34750],{"id":23796,"depth":86,"text":34312},[6869,613],"2013-02-11T19:10:40","Not being a pure Developer but more of a DevOp brings all sorts of interesting problems. When you not only sell software\\nbut are also included in the whole life-cycle you get a different view on things.","https://synyx.de/blog/monitoring-nih-style/",{},"/blog/monitoring-nih-style",{"title":34235,"description":34244},"blog/monitoring-nih-style",[34230,27671,34760],"r","Not being a pure Developer but more of a DevOp brings all sorts of interesting problems. 
When you not only sell software but are also included in the whole life-cycle…","-Oa3Oq9DZqiJ4O_nNim1AmDrqkdb1lRs_zT7ocl1SYA",{"id":34764,"title":34765,"author":34766,"body":34767,"category":35204,"date":35205,"description":35206,"extension":617,"link":35207,"meta":35208,"navigation":499,"path":35209,"seo":35210,"slug":35212,"stem":35213,"tags":35214,"teaser":35218,"__hash__":35219},"blog/blog/setup-selenium-grid.md","Acceptance testing at synyx – Part 3",[12981],{"type":11,"value":34768,"toc":35194},[34769,34772,34786,34790,34804,34807,34811,34814,34818,34821,34901,34904,34910,34917,34965,34971,34980,34984,34990,34997,35008,35037,35040,35108,35112,35115,35131,35140,35144,35150,35158,35162,35165,35171,35174,35181,35188,35191],[14,34770,34765],{"id":34771},"acceptance-testing-at-synyx-part-3",[18,34773,34774,34775,34779,34780,34785],{},"After showing you ",[585,34776,34778],{"href":32799,"rel":34777},[589],"how to request a remote browser"," from a Selenium Grid\nin the last part its time to put some effort in getting the grid running smoothly. Also, check out\nthe ",[585,34781,34784],{"href":34782,"rel":34783},"http://blog.synyx.de/2013/01/atdd-at-synyx/",[589],"first part of the series"," for the greater context of this blog post.",[649,34787,34789],{"id":34788},"setting-up-the-grid","Setting up the Grid",[18,34791,34792,34793,34797,34798,34803],{},"As mentioned in ",[585,34794,34796],{"href":32799,"rel":34795},[589],"part 2",", its pretty straight forward to set up a\nselenium-grid by following the instructions at the ",[585,34799,34802],{"href":34800,"rel":34801},"http://code.google.com/p/selenium/wiki/Grid2",[589],"Selenium Wiki",". This\nworked out pretty well for us when setting up a simple hub plus node configuration on the dev-machines for evaluation\npurposes.",[18,34805,34806],{},"But as soon as you use selenium grid in production you probably want to wrap some professional services around this to\nmake your local system administrator happy. Currently we are running the explained setup on a hub-node (server) as well\nas two virtual machines running the browsers (one ubuntu-based one windows7). But the system is designed to add more\nnodes as soon as they’re needed. And they will be needed soon, especially because the beloved Internet Explorer comes in\nso many flavours and tests do not run very fast on them compared to Chrome or even Firefox ;).",[649,34808,34810],{"id":34809},"the-linux-machines","The Linux machines",[18,34812,34813],{},"The (virtual) machine running the selenium hub and the one running browsers on ubuntu are linux based machines of\ncourse. If you run services on linux-based machines you usually want to have them run as a daemon in background,\nstarted automatically upon system boot. So of course this is what I did here. 
I describe the way this is done on\ndebian / ubuntu systems but this will probably work on other distributions with minimal adjustments.",[649,34815,34817],{"id":34816},"starting-the-hub","Starting the Hub",[18,34819,34820],{},"Selenium provides a “runnable” jar file to start the server by something like",[43,34822,34824],{"className":30754,"code":34823,"language":30756,"meta":48,"style":48},"$ java -jar selenium-server.jar -role hub\nJan 16, 2013 2:14:39 PM org.openqa.grid.selenium.GridLauncher main\nInformation: Launching a selenium grid server\n2013-01-16 14:14:40.662:INFO:osjs.Server:jetty-7.x.y-SNAPSHOT\n...\n",[50,34825,34826,34846,34869,34889,34897],{"__ignoreMap":48},[53,34827,34828,34831,34834,34837,34840,34843],{"class":55,"line":56},[53,34829,34830],{"class":59},"$",[53,34832,34833],{"class":63}," java",[53,34835,34836],{"class":89}," -jar",[53,34838,34839],{"class":63}," selenium-server.jar",[53,34841,34842],{"class":89}," -role",[53,34844,34845],{"class":63}," hub\n",[53,34847,34848,34851,34854,34857,34860,34863,34866],{"class":55,"line":86},[53,34849,34850],{"class":59},"Jan",[53,34852,34853],{"class":63}," 16,",[53,34855,34856],{"class":89}," 2013",[53,34858,34859],{"class":63}," 2:14:39",[53,34861,34862],{"class":63}," PM",[53,34864,34865],{"class":63}," org.openqa.grid.selenium.GridLauncher",[53,34867,34868],{"class":63}," main\n",[53,34870,34871,34874,34877,34880,34883,34886],{"class":55,"line":126},[53,34872,34873],{"class":59},"Information:",[53,34875,34876],{"class":63}," Launching",[53,34878,34879],{"class":63}," a",[53,34881,34882],{"class":63}," selenium",[53,34884,34885],{"class":63}," grid",[53,34887,34888],{"class":63}," server\n",[53,34890,34891,34894],{"class":55,"line":163},[53,34892,34893],{"class":59},"2013-01-16",[53,34895,34896],{"class":63}," 14:14:40.662:INFO:osjs.Server:jetty-7.x.y-SNAPSHOT\n",[53,34898,34899],{"class":55,"line":186},[53,34900,6348],{"class":89},[18,34902,34903],{},"The server binds itself on the configured port and keeps running until you hit CTRL-C while printing out log statements\nto STDOUT.",[18,34905,34906,34907,986],{},"So the first thing I did is wrapp this within a upstart-script that made some stuff configurable and readable (where to\nlog to, which java to use, where is the selenium-jar, the config and so on). I named this script ",[50,34908,34909],{},"selenium-hub",[18,34911,34912,34913,34916],{},"For the sake of simplicity I put all my files to ",[50,34914,34915],{},"/opt/selenium/",". 
This might not be the best “unix-way” but doing so\nhelps me find all the files easily.",[43,34918,34920],{"className":30754,"code":34919,"language":30756,"meta":48,"style":48},"$ ls /opt/selenium\nhubconfig.json\nupstart-selenium-hub\nselenium-hub\nselenium-server.jar -> selenium-server-standalone-2.26.0.jar\nselenium-server-standalone-2.26.0.jar\n",[50,34921,34922,34932,34937,34942,34947,34960],{"__ignoreMap":48},[53,34923,34924,34926,34929],{"class":55,"line":56},[53,34925,34830],{"class":59},[53,34927,34928],{"class":63}," ls",[53,34930,34931],{"class":63}," /opt/selenium\n",[53,34933,34934],{"class":55,"line":86},[53,34935,34936],{"class":59},"hubconfig.json\n",[53,34938,34939],{"class":55,"line":126},[53,34940,34941],{"class":59},"upstart-selenium-hub\n",[53,34943,34944],{"class":55,"line":163},[53,34945,34946],{"class":59},"selenium-hub\n",[53,34948,34949,34952,34955,34957],{"class":55,"line":186},[53,34950,34951],{"class":59},"selenium-server.jar",[53,34953,34954],{"class":82}," -",[53,34956,1084],{"class":389},[53,34958,34959],{"class":63}," selenium-server-standalone-2.26.0.jar\n",[53,34961,34962],{"class":55,"line":221},[53,34963,34964],{"class":59},"selenium-server-standalone-2.26.0.jar\n",[18,34966,34967,34968,5881],{},"As you can see I also added some symlinks so that I dont need to adjust my scripts as soon as I upgrade selenium-server\njar to a newer version (",[50,34969,34970],{},"ln -s selenium-server-standalone-2.26.0.jar selenium-server.jar",[18,34972,34973,34974,34979],{},"If you want to have a detailed look at the files you can check out\nthe ",[585,34975,34978],{"href":34976,"rel":34977},"https://media.synyx.de/uploads//2013/01/synyx-selenium-grid-scripts.tar.gz",[589],"archive containing all scripts"," I\nwrote.",[649,34981,34983],{"id":34982},"managing-the-hub-as-a-service","Managing the Hub as a service",[18,34985,34986,34987,986],{},"So the next thing to be done is starting and stopping the services using init-scripts. This is usually done by adding\nthe scripts to ",[50,34988,34989],{},"/etc/init.d/$servicename",[18,34991,34992,34993,34996],{},"My upstart-scripts are basically copied from the skeleton-file ubuntu brings (",[50,34994,34995],{},"/etc/init.d/skeleton",") adjusted for my\nneeds.",[18,34998,34999,35000,35003,35004,35007],{},"This script takes care of managing the pid-file for the process and starting and stopping it correctly. So I put the\nupstart-script to ",[50,35001,35002],{},"/opt/selenium"," and symlinked them to ",[50,35005,35006],{},"/etc/init.d/",". 
Then the script can be added to the systems\nrunlevels (so that it will be started automatically upon boot).",[43,35009,35011],{"className":30754,"code":35010,"language":30756,"meta":48,"style":48},"ln -s /opt/selenium/upstart-selenium-hub /etc/init.d/selenium-hub\nupdate-rc.d selenium-hub defaults\n",[50,35012,35013,35026],{"__ignoreMap":48},[53,35014,35015,35018,35020,35023],{"class":55,"line":56},[53,35016,35017],{"class":59},"ln",[53,35019,6201],{"class":89},[53,35021,35022],{"class":63}," /opt/selenium/upstart-selenium-hub",[53,35024,35025],{"class":63}," /etc/init.d/selenium-hub\n",[53,35027,35028,35031,35034],{"class":55,"line":86},[53,35029,35030],{"class":59},"update-rc.d",[53,35032,35033],{"class":63}," selenium-hub",[53,35035,35036],{"class":63}," defaults\n",[18,35038,35039],{},"So the hub can be now managed using the “service” command.",[43,35041,35043],{"className":30754,"code":35042,"language":30756,"meta":48,"style":48},"$ sudo service selenium-hub status\n * selenium-hub is not running\n$ sudo service selenium-hub start\n$ sudo service selenium-hub status\n * selenium-hub is running\n",[50,35044,35045,35059,35073,35086,35098],{"__ignoreMap":48},[53,35046,35047,35049,35052,35055,35057],{"class":55,"line":56},[53,35048,34830],{"class":59},[53,35050,35051],{"class":63}," sudo",[53,35053,35054],{"class":63}," service",[53,35056,35033],{"class":63},[53,35058,8481],{"class":63},[53,35060,35061,35063,35065,35067,35070],{"class":55,"line":86},[53,35062,1058],{"class":59},[53,35064,35033],{"class":63},[53,35066,7198],{"class":63},[53,35068,35069],{"class":63}," not",[53,35071,35072],{"class":63}," running\n",[53,35074,35075,35077,35079,35081,35083],{"class":55,"line":126},[53,35076,34830],{"class":59},[53,35078,35051],{"class":63},[53,35080,35054],{"class":63},[53,35082,35033],{"class":63},[53,35084,35085],{"class":63}," start\n",[53,35087,35088,35090,35092,35094,35096],{"class":55,"line":163},[53,35089,34830],{"class":59},[53,35091,35051],{"class":63},[53,35093,35054],{"class":63},[53,35095,35033],{"class":63},[53,35097,8481],{"class":63},[53,35099,35100,35102,35104,35106],{"class":55,"line":186},[53,35101,1058],{"class":59},[53,35103,35033],{"class":63},[53,35105,7198],{"class":63},[53,35107,35072],{"class":63},[649,35109,35111],{"id":35110},"doing-the-same-for-nodes","Doing the same for Nodes",[18,35113,35114],{},"The procedure for nodes is pretty much the same as for the hub but it has some smaller differences:",[18,35116,35117,35118,35121,35122,35127,35128,35130],{},"Our system administration set up the ubuntu machine so that it automatically starts X with unity while automatically\nlogging in the user “synyx” to the desktop. Since this should run the browsers the selenium-server process has to be\nable to access the display. This is usually done by exporting the correct information to the ",[50,35119,35120],{},"DISPLAY","\nenvironment-variable. Also selenium-server needs a system-property where to find the chromedriver\nbinary (",[585,35123,35126],{"href":35124,"rel":35125},"http://code.google.com/p/selenium/wiki/ChromeDriver",[589],"see documentation","). 
So I adjusted my start-script and my\nupstart-script (since it has to access the display its best to run the server as the same user the X-Session belongs\nto) and added everything new to ",[50,35129,35002],{}," and updated my runlevels.",[18,35132,35133,35134,35139],{},"Also we use ",[585,35135,35138],{"href":35136,"rel":35137},"http://en.wikipedia.org/wiki/Vino_%28VNC_server%29",[589],"Vino-server"," to be able to connect via VNC to the\nDesktop (and view the browsers working) in case you want to reproduce bugs and so on.",[649,35141,35143],{"id":35142},"configuring-the-grid","Configuring the grid",[18,35145,35146,35147,35149],{},"Selenium-Server can be configured using command-line arguments or by JSON. I preferred the JSON way and added\nconfiguration-files to ",[50,35148,34915],{}," and added the -nodeConfig /-hubConfig parameter in my start-scripts. Here\nyou configure timeouts, urls and what kind of browsers the instance provides to the grid.",[18,35151,35152,35153,986],{},"The available parameters as well as defaults can be best looked up at\nthe ",[585,35154,35157],{"href":35155,"rel":35156},"http://code.google.com/p/selenium/source/browse/#git%2Fjava%2Fserver%2Fsrc%2Forg%2Fopenqa%2Fgrid%2Fcommon%2Fdefaults",[589],"selenium svn",[649,35159,35161],{"id":35160},"getting-some-browsers","Getting some browsers",[18,35163,35164],{},"As soon as the hub is running try to access it with a browser and navigate to its console. Here you can see how many\nhosts there are, what kind of browsers they supply and so on. Here is an example of a grid with one node providing\nFirefox and Chrome instances, one providing Internet Explorers.",[18,35166,35167],{},[2223,35168],{"alt":35169,"src":35170},"Console of a Selenium Hub","https://media.synyx.de/uploads//2013/01/grid-console.png",[649,35172,35173],{"id":33346},"Context",[18,35175,35176,35177,986],{},"You request these Browsers using the RemoteWebDriver\nas ",[585,35178,35180],{"href":32799,"rel":35179},[589],"described in the last post",[18,35182,35183,35184,986],{},"In case you want do to something like this you can find all files I mentioned here to\ndownload: ",[585,35185,35187],{"href":34976,"rel":35186},[589],"synyx-selenium-grid-scripts.tar.gz",[18,35189,35190],{},"In the upcoming post I will describe how to add a Windows based node to the grid using a similar approach. 
Ah, and after\nthat one we’re done with the technical stuff and proceed to questions like how do we test, how do we report results and\nso on…",[607,35192,35193],{},"html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sVt8B, html code.shiki .sVt8B{--shiki-default:#24292E;--shiki-dark:#E1E4E8}html pre.shiki code .szBVR, html code.shiki .szBVR{--shiki-default:#D73A49;--shiki-dark:#F97583}",{"title":48,"searchDepth":86,"depth":86,"links":35195},[35196,35197,35198,35199,35200,35201,35202,35203],{"id":34788,"depth":126,"text":34789},{"id":34809,"depth":126,"text":34810},{"id":34816,"depth":126,"text":34817},{"id":34982,"depth":126,"text":34983},{"id":35110,"depth":126,"text":35111},{"id":35142,"depth":126,"text":35143},{"id":35160,"depth":126,"text":35161},{"id":33346,"depth":126,"text":35173},[613],"2013-02-04T16:32:28","After showing you how to request a remote browser from a Selenium Grid\\nin the last part its time to put some effort in getting the grid running smoothly. Also, check out\\nthe first part of the series for the greater context of this blog post.","https://synyx.de/blog/setup-selenium-grid/",{},"/blog/setup-selenium-grid",{"title":34765,"description":35211},"After showing you how to request a remote browser from a Selenium Grid\nin the last part its time to put some effort in getting the grid running smoothly. Also, check out\nthe first part of the series for the greater context of this blog post.","setup-selenium-grid","blog/setup-selenium-grid",[35215,32983,35216,35217,32985,21474],"cloud","linux","remotewebdriver","After showing you how to request a remote browser from a Selenium Grid in the last part its time to put some effort in getting the grid running smoothly. 
Also,…","kLH6OE-BvQ8fU_Pr6ifbublwFLiOlzW_UcpYQvMYOKg",{"id":35221,"title":35222,"author":35223,"body":35224,"category":35588,"date":35589,"description":35590,"extension":617,"link":35591,"meta":35592,"navigation":499,"path":35593,"seo":35594,"slug":35596,"stem":35597,"tags":35598,"teaser":35600,"__hash__":35601},"blog/blog/remote-browsers.md","Acceptance testing at synyx – Part 2",[12981],{"type":11,"value":35225,"toc":35580},[35226,35229,35235,35239,35248,35251,35254,35257,35266,35269,35272,35278,35282,35288,35294,35333,35337,35344,35387,35396,35535,35547,35550,35564,35571,35575,35578],[14,35227,35222],{"id":35228},"acceptance-testing-at-synyx-part-2",[18,35230,18606,35231,35234],{},[585,35232,34784],{"href":34782,"rel":35233},[589]," I gave some reasons why to do acceptance\ntesting (or webtests) as well as a rough overview how we do it at synyx. This part is rather technical and describes how\nto use Seleniums RemoteWebDriver to control browsers on a remote host.",[2207,35236,35238],{"id":35237},"running-browsers-elsewhere-selenium-grid","Running browsers elsewhere – Selenium GRID",[18,35240,35241,35242,35247],{},"One important thing for me is that the browsers that are used to execute the webtests should not run on the same host as\nthe tests. This way we can minimize setup for the tests on the developers machine. Also, the machine running our\nCI-System ",[585,35243,35246],{"href":35244,"rel":35245},"http://jenkins-ci.org/",[589],"Jenkins"," is a headless server where no browsers can be installed. Therefore we also\nneed the remoting capability for Jenkins.",[18,35249,35250],{},"And another important argument for running browsers on a remote host is that probably not all browsers you want to run\nyour tests on can be installed on a developers machine. For example my unix machine refuses to run Internet\nExplorer ;-).",[18,35252,35253],{},"So in my opinion the best choice to execute tests is using Seleniums RemoteWebdriver.",[649,35255,35256],{"id":35217},"RemoteWebDriver",[18,35258,35259,35260,35265],{},"If you are familiar with Selenium you know that in order to run your test in a browser you usually instanciate the\ncorrect WebDriver for it (e.g. FirefoxWebDriver to run a Firefox). But there is also RemoteWebDriver that can be used to\nsteer browsers on some other host. So this RemoteWebDriver acts like a proxy, talks to a remote-service called\nselenium-server using HTTP. Then selenium-server actually controls the browser.\nSee ",[585,35261,35264],{"href":35262,"rel":35263},"http://code.google.com/p/selenium/wiki/RemoteWebDriverServer",[589],"Seleniums documentation"," for further details.",[18,35267,35268],{},"As mentioned you have to have a selenium-server instance to use RemoteWebDriver. Selenium-server can run in\nstandalone-mode or – if you want to scale up – as grid with a hub and many nodes. In this case you have one hub which\nonly matches requested capabilities and delegates clients to existing hosts on selenium nodes.",[18,35270,35271],{},"The following graphic shows you how a setup using Selenium Grid (hub and nodes) could look like:",[18,35273,35274],{},[2223,35275],{"alt":35276,"src":35277},"How selenium grid looks like","https://media.synyx.de/uploads//2013/01/selenium-grid.png",[649,35279,35281],{"id":35280},"requesting-a-browser-from-the-grid","Requesting a browser from the grid",[18,35283,35284,35285,986],{},"Once here is a server running you are able to request a browser from it by instanciating RemoteWebDriver with the URL of\nit. 
### Requesting a browser from the grid

Once such a server is running you can request a browser from it by instantiating RemoteWebDriver with its URL. In addition you have to describe the browser you want using the class `DesiredCapabilities`.

The following code snippet shows how to request a Chrome version 20 on any operating system from a selenium-server listening on `http://localhost:4444`:

```java
DesiredCapabilities cap = new DesiredCapabilities("chrome", "20", Platform.ANY);
// url of selenium-hub/server
URL url = new URL("http://localhost:4444");
WebDriver driver = new RemoteWebDriver(url, cap);
driver.get(targetSystemUrl);
driver.findElement(By.id("number")).sendKeys("42");
...
```

### Execute tests with different browsers

In order to execute the same test in many browsers we decided to read the browser capabilities from a .properties file instead of hardcoding them in the test classes.
The name of that file can be handed to the test as a system property, so the browser to use can be controlled by adding `-DbrowserProperties=firefox.properties` to the build command line.

```java
String fileName = System.getProperty("browserProperties");
Properties p = loadProperties(fileName);
DesiredCapabilities c = new DesiredCapabilities(
    p.getProperty("browser.name"),
    p.getProperty("browser.version"),
    p.getProperty("browser.platform")
);
```

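A matching properties file, assuming the keys read above, might look like this (the values are only examples):

```properties
# firefox.properties - example values, not taken from the original setup
browser.name=firefox
browser.version=18
browser.platform=ANY
```
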
I use this approach to bind the [maven-surefire-plugin](http://maven.apache.org/surefire/maven-surefire-plugin/) multiple times to the test lifecycle phase: once for each browser.

```xml
<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-surefire-plugin</artifactId>
    <version>2.12</version>
    <executions>
        <execution>
            <id>default-test</id>
            <phase>test</phase>
            <goals><goal>test</goal></goals>
            <configuration>
                <systemPropertyVariables>
                    <browserProperties>chrome.properties</browserProperties>
                </systemPropertyVariables>
                <reportNameSuffix>chrome</reportNameSuffix>
            </configuration>
        </execution>
        <execution>
            <id>test-firefox</id>
            <phase>test</phase>
            <goals><goal>test</goal></goals>
            <configuration>
                <systemPropertyVariables>
                    <browserProperties>firefox.properties</browserProperties>
                </systemPropertyVariables>
                <reportNameSuffix>firefox</reportNameSuffix>
            </configuration>
        </execution>
        <!-- ... more browsers here ... -->
    </executions>
</plugin>
```

An alternative would be to wrap the configuration in [Maven profiles](http://maven.apache.org/guides/introduction/introduction-to-profiles.html) so that the browsers to use can be selected with `mvn test -Pfirefox`.

Since the test code that reads the properties has sensible defaults, it is still possible to execute the tests from the IDE of your choice.
For example, the defaults simply request a Chrome on any platform.

It worked out pretty well to put this infrastructural code into an abstract base class for all tests. That class uses `@Before` or `@BeforeClass` to read the needed capabilities from the properties file, set up the RemoteWebDriver and point it at the system under test. Of course something like a JUnit Rule would work just as well.

Either way, the actual tests don't have to care how the WebDriver they use is instantiated: they just call the base class's `getDriver()` and execute the real testing code.

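A minimal sketch of such a base class (class name, defaults and hub URL are invented for illustration) could look like this:

```java
import java.net.URL;
import java.util.Properties;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.openqa.selenium.Platform;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;

public abstract class AbstractWebTest {

    private static WebDriver driver;

    @BeforeClass
    public static void setUpDriver() throws Exception {
        // defaults used when no -DbrowserProperties=... is given
        Properties p = new Properties();
        p.setProperty("browser.name", "chrome");
        p.setProperty("browser.platform", "ANY");

        String fileName = System.getProperty("browserProperties");
        if (fileName != null) {
            p.load(AbstractWebTest.class.getClassLoader().getResourceAsStream(fileName));
        }

        DesiredCapabilities capabilities = new DesiredCapabilities(
                p.getProperty("browser.name"),
                p.getProperty("browser.version", ""),
                Platform.valueOf(p.getProperty("browser.platform", "ANY")));

        // URL of the selenium hub is an assumption here
        driver = new RemoteWebDriver(new URL("http://localhost:4444"), capabilities);
    }

    @AfterClass
    public static void tearDownDriver() {
        driver.quit();
    }

    protected static WebDriver getDriver() {
        return driver;
    }
}
```
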
### Next Part

In the next part I will show you how the Selenium Grid can be set up in a good way so that you make your local system administration happy \o/. So again… stay tuned.

# Acceptance testing at synyx – Part 1

## Overview – Why and how we do web-testing

In my team at synyx we wrote a lot of tests in 2012. Most of them were unit tests (as a consequence of TDD), some things are also tested with integration tests (sometimes because they were hard to test as unit tests, sometimes in addition to them, to verify that the interaction of components works properly). I can tell you that [TDD](http://de.wikipedia.org/wiki/TDD) and the special focus on tests changed the way we work quite a bit and of course boosted the quality of our applications even further. It's not that we didn't write tests before, but once you develop test-driven you can start to trust your code, which makes refactorings (evolution) easy.

But there are always components that are hard to test. This includes code related to user interfaces, complete workflows and – sigh – Internet Explorer. So, at the end of 2012 we decided to give automated browser tests another chance (we evaluated and tried this years ago, but – for several reasons – did not make good experiences with it back then).

### Arguments to do it

Testing backend components has become easy once you are practiced in writing tests and follow some design principles like dependency injection. But usually, easy testing stops as soon as you enter the web layer. Yes, I know it's possible to write tests for [Spring MVC controllers](http://blog.springsource.org/2012/11/12/spring-framework-3-2-rc1-spring-mvc-test-framework/), but going down this road always felt a bit weird. And even if you have these tests, you want to test the whole thing (controllers, JSPs, filters, interceptors and what not) in an integrative way. So the best solution is running automated tests against the deployed application using a real browser.

In fact, since the browsers that display our applications differ in some details, we even have to test the apps in many of them – or at least with those we want to ensure compatibility with. For example, some of the bugs reported for our last application only affect one of the browsers out there (mostly a particular version of Internet Explorer). These bugs were not detected early because developers and QA tend not to test everything in every browser – especially if they have to log on to one or more remote Windows machines in order to do so. Lately the amount of JavaScript used in our software has been increasing, so this gets even more important.

The last and one of the most important arguments for webtests is that they are acceptance tests and live in another scope. Unit and integration tests, in contrast, are more like whitebox tests: I tend to say that they are for us developers. They give us confidence and the freedom to safely extend and change our application. These tests test from the inside and have knowledge of the system. They do not really affect the business people (apart from some strange cases where a certain amount of test coverage is requested).

Acceptance tests, on the other hand, really focus on the business value of the application. They usually test complete workflows or "features" of an application. The product owner's user stories should have acceptance criteria that can be expressed as acceptance tests. The tests should not care about how these criteria are met, but whether they are met. So acceptance tests test from the "outside", as a [complete blackbox](http://www.webopedia.com/TERM/B/Black_Box_Testing.html) (without knowledge of the internals of the application).

Of course these tests can be executed continuously, and this ensures that the user story or feature works as expected – and always will. So these tests are not only for us developers, they are for our clients.
By the way, this also makes good and colourful reporting even more important.

### How we do it – Overview

This post is the beginning of a whole series that describes how we do web testing at synyx. So after the quick overview of why we do it, let me tell you how we do it at a high level. Follow-up posts will describe the important aspects in more detail.

- Tests are written in Java/JUnit using [Selenium WebDriver](http://seleniumhq.org/)
- Selenium's RemoteWebDriver allows the browser to run on another host than the test
- The grid functionality of selenium-server is used to be able to request a big variety of different browsers and versions using the same initialization strategy and – of course – to scale up
- The tests are executed automatically several times – once for each browser we want to ensure compatibility with
- Tests are written in [BDD style](http://dannorth.net/introducing-bdd/) and use abstractions of actions (steps) and pages
- Tests are reported in a nice "manager-friendly" way including pie charts and screenshots
- [Jenkins](http://jenkins-ci.org/) executes these tests and generates the report continuously against a system that is automatically deployed (continuous deployment)

So stay tuned for detailed information about ATDD / webtests at synyx during the next weeks.

# Visualize JavaScript code quality and code coverage with Sonar – part 2

In my [previous post](http://blog.synyx.de/2012/08/visualize-javascript-code-quality-and-code-coverage-with-sonar/) I wrote about the Sonar JavaScript plugin, JsTestDriver, the jstd-maven-plugin and some problems with the configuration. Meanwhile we've got a working setup, which I want to explain in this post.

For the impatient ones among us, there is a sample project available on GitHub: https://github.com/synyx/JavaJsSonarDemo

## Run JavaScript tests with JsTestDriver

The first problem was that the browser tab opened by the jstd runner was not closed anymore. In the previous setup jstd was configured to start the test server during the Maven test goal. Now we have a jstd server instance running on a remote computer with already captured browsers. Therefore you just have to execute

```bash
java -jar JsTestDriver.jar --port 9876
```

in the terminal, start the browsers of your choice and load the URL

```
http://localhost:9876/capture
```

Now the browsers will listen to the jstd server to run the tests. No more tabs will be opened that we would have to close manually. This works pretty well as long as you use the JsTestDriver.jar to run your tests from the terminal.
But if you use the jstd-maven-plugin you will get the following error:

```
java.lang.RuntimeException: Connection error on : sun.net.www.protocol.http.HttpURLConnection:http://10.0.15.24:9876/fileSet
    at com.google.jstestdriver.HttpServer.post(HttpServer.java:96)
    at com.google.jstestdriver.FileUploader.determineServerFileSet(FileUploader.java:174)
    at com.google.jstestdriver.action.UploadAction.run(UploadAction.java:38)
    at com.google.jstestdriver.ActionRunner.runActions(ActionRunner.java:64)
    at com.google.jstestdriver.JsTestDriver.main(JsTestDriver.java:86)
Caused by: java.io.IOException: Server returned HTTP response code: 405 for URL: http://10.0.15.24:9876/fileSet at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1403)
    at com.google.jstestdriver.HttpServer.post(HttpServer.java:92)
    ... 4 more
Unexpected Runner Condition: Connection error on: sun.net.www.protocol.http.HttpURLConnection:http://10.0.15.24:9876/fileSet
```

The problem is that the jstd-maven-plugin uses an older version of jstd. With the current version 1.3.5 everything works as expected. But how can we tell the jstd-maven-plugin to use the new version of jstd?

Well, you can simply add a property `<jstd.jar>` to your pom.xml properties and point it to the jar file.

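Such a property entry might look like the following; the path is only a placeholder for wherever the 1.3.5 jar lives on your machine:

```xml
<properties>
    <!-- example only: point this at the locally downloaded JsTestDriver 1.3.5 jar -->
    <jstd.jar>${user.home}/tools/JsTestDriver-1.3.5.jar</jstd.jar>
</properties>
```
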
Since the jstd-maven-plugin has a dependency on jstd 1.3.2, that old jar would still end up on our classpath. So we have to tell the jstd-maven-plugin to use jstd 1.3.5 instead of the outdated one.

```xml
<dependencies>
    ...
    <dependency>
        <groupId>com.google.jstestdriver</groupId>
        <artifactId>jstestdriver</artifactId>
        <version>1.3.5</version>
        <scope>test</scope>
    </dependency>
    ...
</dependencies>
<build>
    ...
    <pluginManagement>
        <plugins>
            <plugin>
                <groupId>com.googlecode.jstd-maven-plugin</groupId>
                <artifactId>jstd-maven-plugin</artifactId>
                <version>1.3.2.5</version>
                <dependencies>
                    <dependency>
                        <groupId>com.google.jstestdriver</groupId>
                        <artifactId>jstestdriver</artifactId>
                        <version>1.3.5</version>
                    </dependency>
                </dependencies>
            </plugin>
        </plugins>
    </pluginManagement>
    ...
</build>
```

If you try to build the Maven project now, you will notice that jstd 1.3.5 unfortunately can't be found in the Maven central repository. So you have to add the jar to your local Maven repo or to your Nexus. To add it to your local repo, execute this command in the terminal:

```bash
mvn install:install-file \
    -Dfile=<path.to.jstd.jar> \
    -DgroupId=com.google.jstestdriver \
    -DartifactId=jstestdriver \
    -Dversion=1.3.5 \
    -Dpackaging=jar
```

Now the JavaScript tests are automatically run by JsTestDriver during the Maven build.

## Analyse Java and JavaScript sources with Sonar

The second problem I mentioned in my previous post was the specification of the sourceDirectory for Sonar to be able to fetch the JavaScript sources. This of course overrides the Java source directory, and the Maven build will fail. I solved this with a Maven profile which sets the source directory, the language Sonar should analyse, and a branch name for the JavaScript analysis (otherwise the Java analysis would be overridden).
Furthermore you have to tell Maven to skip the Java tests, since those sources cannot be found anymore due to the changed sourceDirectory.

```xml
<properties>
    ...
    <srcDir>src/main/java</srcDir>
    ...
</properties>
<build>
    ...
    <sourceDirectory>${srcDir}</sourceDirectory>
    ...
</build>
<profiles>
    <profile>
        <id>sonarJsEnabled</id>
        <properties>
            <srcDir>src/main/webapp/js</srcDir>
            <maven.test.skip>true</maven.test.skip>
            <sonar.language>js</sonar.language>
            <sonar.branch>js</sonar.branch>
        </properties>
    </profile>
</profiles>
```

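Assuming the usual sonar-maven-plugin invocation, the two analyses would then presumably be triggered like this:

```bash
# regular Java analysis
mvn clean install sonar:sonar

# JavaScript analysis: skips the Java tests, analyses src/main/webapp/js, reports to the "js" branch
mvn sonar:sonar -PsonarJsEnabled
```
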
# Problem with Maven 3 Dependency Resolution

In the course of my bachelor thesis I migrated a project from Maven 2 to Maven 3.

While building the project with Maven 3, I stumbled a few times over a somewhat confusing error message.

### The problem:

```
Could not find artifact XXX in nexus.synyx.de (http://nexus.synyx.de)
```

…even though the artifact is present, in the correct version, both locally and in the remote repository.

### The cause:

*It's not a bug, it's a feature!*

After a bit of research it turned out that this is a [Maven 3 feature](https://cwiki.apache.org/confluence/display/MAVEN/Maven+3.x+Compatibility+Notes#Maven3.xCompatibilityNotes-ResolutionfromLocalRepository) with – let's say – an unfortunately chosen error message.

Since Maven 3, Maven records in the `_maven.repositories` files, for each artifact, the repository it was downloaded from. Among other things, this makes sure that the correct artifact is really used, and it also uncovers repositories that have not been declared.

However, the rather unhelpful error message can easily confuse users.

### The solution:

If this problem occurs, you should first check whether all repositories are declared correctly in the POM and in the `settings.xml`, and fix them if necessary.

If the error message still shows up, this helps:

`cd ~/.m2/repository/PATH/TO/ARTIFACT/`

`rm _maven.repositories`

Another reason to delete this file would be if you explicitly want the artifact to be taken from the local Maven repository.

# Properly calculating time differences in JavaScript

Let me tell you a tale about a fat-client application that has some nice time-related logic written in JavaScript. We want to calculate the difference between two dates, measured in days. Easy, you say, just use the `Date` object and do some calculating.

As a JavaScript veteran you know that you have to use `new Date()` instead of `Date()` because the second one returns a string for some reason, you recall that the month of October is identified by the number `9` because we start counting the months at `0`, and you quickly figure out that subtracting two `Date` objects results in a number which is the amount of milliseconds passed between the two moments.

```javascript
var DAY_IN_MS = 24 * 60 * 60 * 1000;
var d1 = new Date(2012, 9, 27);
var d2 = new Date(2012, 9, 28);
console.log((d2 - d1) / DAY_IN_MS); // yields 1
```

Looks fine, doesn't it? So just wrap it in a function, unit-test it and be done with it? Not so fast there.
Let's just change the dates ever so slightly:

```javascript
var DAY_IN_MS = 24 * 60 * 60 * 1000;
var d1 = new Date(2012, 9, 27);
var d2 = new Date(2012, 9, 28);
var d3 = new Date(2012, 9, 29);
console.log((d2 - d1) / DAY_IN_MS); // yields 1
console.log((d3 - d2) / DAY_IN_MS); // yields 1.0416666666666667
```

This is the point where most developers start cursing. Is this a new way in which JavaScript is broken? It isn't, because the number is completely accurate.

The JavaScript object created by `new Date(2012, 9, 28)` represents midnight on the 28th of October, 2012 *in your local time zone*; `new Date(2012, 9, 29)` represents midnight the following day.

Subtracting the first from the second yields the number of milliseconds that have passed between those two moments, which, as you probably have guessed, includes the extra hour put in because of [daylight saving time](http://www.timeanddate.com/worldclock/clockchange.html?n=37).

```javascript
> new Date(2012, 9, 29);
Mon Oct 29 2012 00:00:00 GMT+0100 (CET)
> new Date(2012, 9, 28);
Sun Oct 28 2012 00:00:00 GMT+0200 (CEST)
> (new Date(2012, 9, 29) - new Date(2012, 9, 28)) / 60 / 60 / 1000
25
```

So where is the error?
The error is in our assumption that a day has 24 hours, because depending on how you define a day, it doesn't: October 28th 2012 has 25 hours.

If you Google "JavaScript time difference", most people just use `Math.round` ([example](http://psoug.org/snippet/Javascript-Calculate-time-difference-between-two-dates_116.htm)) or simply use flat-out buggy code ([here](https://web.archive.org/web/20151119010127/http://www-10.lotus.com:80/ldd/ddwiki.nsf/dx/Various_Time_Differences_in_JavaScript) and [here](http://www.javascriptkit.com/javatutors/datedifference.shtml)) and call it a day (pun intended), but that is not how we roll here.

What do we really mean when we ask *"How many days have passed between two dates in the calendar"*? We usually mean *"How many midnights have happened between these two dates?"*. Unfortunately, because of DST, you can't just use the number of milliseconds between two dates at midnight to calculate how many midnights have happened, because some of them are more or less than 24 hours apart. If only there was a magical place that doesn't have this madness going on…

Luckily, there is, and that place is [UTC](http://en.wikipedia.org/wiki/Coordinated_Universal_Time). UTC is a time measuring system that does not have daylight saving time.

**Edit:** *as pointed out in the comments, the rabbit hole goes down even further – officially, even in UTC, a day might have more than 24 hours because of leap seconds. Fortunately for us, the ECMA-262 specification [explicitly ignores leap seconds](http://www.ecma-international.org/ecma-262/5.1/#sec-15.9.1.1) and we can go about our business. If JavaScript implemented UTC correctly, we would have to account for leap seconds or use [UT1](http://en.wikipedia.org/wiki/Universal_Time#Versions).*

The JavaScript Date API is just as beautiful as most other JavaScript APIs: while the only useful way to use the `Date` object is as a constructor (with `new`), the way to use UTC is the static function `Date.UTC`, which returns a unix timestamp.
This is the JavaScript time API in a nutshell:

```javascript
> new Date(2012, 9, 29);
Mon Oct 29 2012 00:00:00 GMT+0100 (CET) // (a somewhat useful object)
> Date(2012, 9, 29);
'Mon Nov 05 2012 16:18:12 GMT+0100 (CET)' // (a string - no relation to the parameters)
> Date.UTC(2012, 9, 29);
1351468800000 // (unix time in milliseconds)
> new Date.UTC(2012, 9, 29); // failure
TypeError: function UTC() { [native code] } is not a constructor
    at repl:1:9
    [....]
```

The correct calculation, without using `Math.round` or other hacks, therefore is:

```javascript
var DAY_IN_MS = 24 * 60 * 60 * 1000;
var d1 = Date.UTC(2012, 9, 27);
var d2 = Date.UTC(2012, 9, 28);
var d3 = Date.UTC(2012, 9, 29);
console.log((d2 - d1) / DAY_IN_MS); // yields 1
console.log((d3 - d2) / DAY_IN_MS); // yields 1
```

These kinds of bugs are sneaky because they only show up for certain input values (I wonder if I would have noticed it if I hadn't tested the code last week around the DST change) and usually don't show up in unit tests unless you happen to know what you are looking for.
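If you start from two local `Date` objects (say, values picked in a datepicker) and want the number of midnights between them, one way to apply this idea is to project both onto their UTC midnight first. This small helper is not part of the original example, just an illustration of the approach:

```javascript
var DAY_IN_MS = 24 * 60 * 60 * 1000;

// counts how many midnights lie between two local Date objects
function daysBetween(from, to) {
    var fromUtc = Date.UTC(from.getFullYear(), from.getMonth(), from.getDate());
    var toUtc = Date.UTC(to.getFullYear(), to.getMonth(), to.getDate());
    return (toUtc - fromUtc) / DAY_IN_MS;
}

daysBetween(new Date(2012, 9, 28), new Date(2012, 9, 29)); // 1, despite the 25-hour day
```
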
The results are often *nearly* correct, and we are not used to thinking about time zones and [often hold invalid assumptions about time](http://infiniteundo.com/post/25326999628/falsehoods-programmers-believe-about-time). Always using UTC isn't a solution either, because sometimes we want the local time zone to be considered.

Libraries like [Moment.js](http://momentjs.com/) help, but the real protection against these kinds of bugs is to know about time zones and time measurement systems, and to think about what you are actually calculating instead of simply throwing a `Math.round` in there to make it all work.

Just as anybody that has had the pleasure of seeing *Rent* will tell you: while a year has *five hundred twenty-five thousand six hundred minutes*, it is still difficult to measure the time of the year.

# Database Migration using Flyway and Spring (and existing Data)

My team and I are currently working on a project we first started in early 2010. The application has been in production since sometime late 2010, and there has been no active development except for minor enhancements and bugfixes since then. Even if our code, processes and tools were good in 2010, we've improved a lot since then. Working on my old projects is one of the occasions where this becomes most evident.

When we start a new project today we usually use the database migration tool [Flyway](http://code.google.com/p/flyway/) right from the beginning.
The tool keeps code and the database schema in sync and usually takes care of automatic migration during application startup.

Back then we usually used SQL scripts, which had to be executed manually during deployment, to keep the database up to date. Out of laziness or lack of time, this was also the first approach we took this week to handle database changes. These scripts are checked into version control along with any code changes.

This may work pretty well in the beginning, but it can also become annoying very fast: everything you have to do manually is destined to fail some time. ALL THE TIME! It fails on my colleagues' working machines, it fails on our continuous integration server (Jenkins) and it will probably fail hard on production if you don't pay enough attention during a deployment.

So there we were, about 60 minutes ago, standing there with a database dump from production and a bunch of SQL scripts which accumulated during this week of development. Well, it is Friday and I wanted to test something new, so I remembered a talk I attended earlier this year about an alternative to Liquibase: [Flyway](http://code.google.com/p/flyway/).

What it basically does is execute a bunch of SQL scripts it hasn't already executed on the given database. To get started, I saved the dump of the production system into the db/migration/ package of our web application:

```bash
mkdir -p src/main/resources/db/migration/ && cp prod_dump.sql src/main/resources/db/migration/V1__initial_import.sql
```

As many of our applications, this one too is based on Spring and Maven. So I added the Flyway dependency to our pom.xml and also some XML to the bean configuration.

pom.xml:

```xml
<dependency>
    <groupId>com.googlecode.flyway</groupId>
    <artifactId>flyway-core</artifactId>
    <version>1.7</version>
</dependency>
```

Even if there is a Maven plugin to execute the migration scripts, we got used to migrating the database during the application boot process (because you never have to think about it again, it simply migrates…). So we add the Flyway bean to our bean configuration file. It is important that the Flyway bean is instantiated early, because it has to migrate the database before anyone else uses it. In our case "anyone" is actually the EntityManager, so I configured the persistenceUnitManager to depend on flyway (which means Flyway runs first):

```xml
<bean id="flyway" class="com.googlecode.flyway.core.Flyway" init-method="migrate">
    <property name="dataSource" ref="dataSource"/>
</bean>

<bean id="persistenceUnitManager" depends-on="flyway"
      class="org.springframework.orm.jpa.persistenceunit.DefaultPersistenceUnitManager">
    <property name="defaultDataSource" ref="dataSource"/>
</bean>
```

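If you would rather configure this in code than in XML, a rough equivalent using the classic Flyway 1.x API could look like this (the surrounding class and the point where it is called are invented for illustration):

```java
import javax.sql.DataSource;

import com.googlecode.flyway.core.Flyway;

public class DatabaseMigrator {

    // call this before the EntityManager (or anything else) touches the database
    public static void migrate(DataSource dataSource) {
        Flyway flyway = new Flyway();
        flyway.setDataSource(dataSource);   // same as the <property name="dataSource"> above
        flyway.migrate();                   // same as init-method="migrate"
    }
}
```
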
Of course there are several configuration options for the Flyway object. You can refer to the [documentation](http://code.google.com/p/flyway/wiki/ApplicationIntegration).

By default, Flyway will now search for SQL scripts in your classpath. It expects the scripts in the db.migration package, following a particular naming scheme: Vxxx__description.sql, just like the one we already created earlier (V1__initial_import.sql). It will also remember at which version the database currently is and will only execute scripts it has not executed so far. So when we start our application, Flyway will find our script and execute it. Afterwards it knows that the database is at version 1 and will not execute the V1 file again. This only works on an empty database, so you should drop and create your local database at this point.

```
15:29:11,362 INFO .flyway.core.metadatatable.MetaDataTable: 111 - Creating Metadata table: schema_version (Schema: mydb)
15:29:11,408 INFO glecode.flyway.core.migration.DbMigrator: 120 - Current schema version: null
15:29:11,412 INFO glecode.flyway.core.migration.DbMigrator: 205 - Migrating to version 1
15:29:24,694 INFO glecode.flyway.core.migration.DbMigrator: 191 - Successfully applied 1 migrations (execution time 00:01.290s).
```

Now if I have database changes, I simply add a new SQL file containing the change with the prefix V2__ and so on. If my colleagues update their working copy they will also get my SQL changes, and Flyway will execute them during application boot-up (or integration test) – nobody has to do this manually anymore.

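A follow-up migration under this naming scheme might look like the following; the file name and SQL are invented examples, not taken from the project:

```sql
-- src/main/resources/db/migration/V2__add_customer_email.sql
ALTER TABLE customer ADD COLUMN email VARCHAR(255);
CREATE INDEX idx_customer_email ON customer (email);
```
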
Working on my old projects is\\none of the occasions, where this becomes most evident.","https://synyx.de/blog/database-migration-using-flyway-and-spring-and-existing-data/",{},"/blog/database-migration-using-flyway-and-spring-and-existing-data",{"title":36668,"description":36677},"blog/database-migration-using-flyway-and-spring-and-existing-data",[30643,36698,30646,1010],"My team and I are currently working on an project we first started in early 2010. The application is in production since sometime late 2010 and there has been no…","TnwY5J1dIhZAwIJnQj9D6OgKSWLYQ5REWmVXTVhYu3U",{"id":36803,"title":36804,"author":36805,"body":36806,"category":37629,"date":37630,"description":37631,"extension":617,"link":37632,"meta":37633,"navigation":499,"path":37634,"seo":37635,"slug":36810,"stem":37636,"tags":37637,"teaser":37638,"__hash__":37639},"blog/blog/implementing-acceptance-tests-with-jbehave.md","Implementing acceptance tests with jbehave",[13434],{"type":11,"value":36807,"toc":37627},[36808,36811,36814,36817,36820,36827,36830,36969,36983,37115,37132,37176,37179,37347,37350,37353,37493,37503,37597,37625],[14,36809,36804],{"id":36810},"implementing-acceptance-tests-with-jbehave",[18,36812,36813],{},"Producing high quality software in an agile process means that everybody involved in the delivery team (or in other\nwords: the team as a whole) do their best to ensure that each products increment delivered to the customer meets the\nbusiness values of each story that has been implemented.",[18,36815,36816],{},"One strategy to achieve this, is to define executable specifications each reflecting a stories acceptance criteria.\nDoing so allows you to document the story requirements and drive the development of the product – this is what behaviour\ndriven development (BDD) is about. Not only does an executable specification provide a definition-of-done for a given\nstory; in addition, it will serve as a regression test, ensuring the acceptance criteria are fulfilled while the product\nevolves in following sprints.",[18,36818,36819],{},"An important difference between automated acceptance tests and unit- (or component-) tests is that acceptance tests\nare business-facing, i.e. their responsibility is to make sure that each story implementation delivers a certain\nbusiness value to the customer. Ideally, they are written by the customer (with the help of business analysts and/or\ntesters) and are implemented by the testers, for example by pairing with programmers. There are various tools available\nsupporting the creation of automated acceptance tests, most of them providing a DSL (internal or external).",[18,36821,36822,36826],{},[585,36823,36825],{"href":12348,"rel":36824,"title":36825},[589],"jbehave"," is one of these tools, allowing to define a stories acceptance criteria in the\ncommonly used given-when-then form. The following snippets show how a jbehave acceptance test might look like, how it\nis implemented and integrated into a project. To keep things simple the acceptance test verifies the behaviour of a\ncertain service, directly using the service layers API. In a real project – where the behaviour of an application\nrunning in a test environment should be verified – the acceptance test probably would not be written against the serive\nAPI. 
Instead, an application driver would be injected into the acceptance test, in order to initialize application state\nand then make requests against the applications REST-API or the GUI (using a window driver component).",[18,36828,36829],{},"So, let’s start by adding the required dependencies and plugins into the POM (at this point, it should be obvious that\nmy demo project is Maven-based):",[43,36831,36833],{"className":13786,"code":36832,"language":13788,"meta":48,"style":48},"...\n\u003CmodelVersion>4.0.0\u003C/modelVersion>\n\u003CgroupId>dyn-ip\u003C/groupId>\n\u003CartifactId>dyn-ip\u003C/artifactId>\n\u003Cversion>1.0-SNAPSHOT\u003C/version>\n\u003Cpackaging>war\u003C/packaging>\n\u003Cname>DynIP\u003C/name>\n...\n\u003Cdependency>\n \u003CgroupId>junit\u003C/groupId>\n \u003CartifactId>junit\u003C/artifactId>\n \u003Cscope>test\u003C/scope>\n\u003C/dependency>\n\u003C!-- enables jbehave acceptance tests -->\n\u003Cdependency>\n \u003CgroupId>org.jbehave\u003C/groupId>\n \u003CartifactId>jbehave-core\u003C/artifactId>\n \u003Cversion>3.6.8\u003C/version>\n \u003Cscope>test\u003C/scope>\n\u003C/dependency>\n\u003C!-- adds dependency injection support\n using the Weld CDI container -->\n\u003Cdependency>\n \u003CgroupId>org.jbehave\u003C/groupId>\n \u003CartifactId>jbehave-weld\u003C/artifactId>\n \u003Cversion>3.6.8\u003C/version>\n \u003Cscope>test\u003C/scope>\n\u003C/dependency>\n...\n",[50,36834,36835,36839,36844,36849,36854,36859,36864,36869,36873,36878,36883,36888,36893,36898,36903,36907,36912,36917,36922,36926,36930,36935,36940,36944,36948,36953,36957,36961,36965],{"__ignoreMap":48},[53,36836,36837],{"class":55,"line":56},[53,36838,6348],{},[53,36840,36841],{"class":55,"line":86},[53,36842,36843],{},"\u003CmodelVersion>4.0.0\u003C/modelVersion>\n",[53,36845,36846],{"class":55,"line":126},[53,36847,36848],{},"\u003CgroupId>dyn-ip\u003C/groupId>\n",[53,36850,36851],{"class":55,"line":163},[53,36852,36853],{},"\u003CartifactId>dyn-ip\u003C/artifactId>\n",[53,36855,36856],{"class":55,"line":186},[53,36857,36858],{},"\u003Cversion>1.0-SNAPSHOT\u003C/version>\n",[53,36860,36861],{"class":55,"line":221},[53,36862,36863],{},"\u003Cpackaging>war\u003C/packaging>\n",[53,36865,36866],{"class":55,"line":242},[53,36867,36868],{},"\u003Cname>DynIP\u003C/name>\n",[53,36870,36871],{"class":55,"line":273},[53,36872,6348],{},[53,36874,36875],{"class":55,"line":279},[53,36876,36877],{},"\u003Cdependency>\n",[53,36879,36880],{"class":55,"line":496},[53,36881,36882],{}," \u003CgroupId>junit\u003C/groupId>\n",[53,36884,36885],{"class":55,"line":503},[53,36886,36887],{}," \u003CartifactId>junit\u003C/artifactId>\n",[53,36889,36890],{"class":55,"line":509},[53,36891,36892],{}," \u003Cscope>test\u003C/scope>\n",[53,36894,36895],{"class":55,"line":515},[53,36896,36897],{},"\u003C/dependency>\n",[53,36899,36900],{"class":55,"line":521},[53,36901,36902],{},"\u003C!-- enables jbehave acceptance tests -->\n",[53,36904,36905],{"class":55,"line":527},[53,36906,36877],{},[53,36908,36909],{"class":55,"line":533},[53,36910,36911],{}," \u003CgroupId>org.jbehave\u003C/groupId>\n",[53,36913,36914],{"class":55,"line":539},[53,36915,36916],{}," \u003CartifactId>jbehave-core\u003C/artifactId>\n",[53,36918,36919],{"class":55,"line":545},[53,36920,36921],{}," \u003Cversion>3.6.8\u003C/version>\n",[53,36923,36924],{"class":55,"line":2414},[53,36925,36892],{},[53,36927,36928],{"class":55,"line":2426},[53,36929,36897],{},[53,36931,36932],{"class":55,"line":2438},[53,36933,36934],{},"\u003C!-- adds dependency injection 
support\n",[53,36936,36937],{"class":55,"line":2451},[53,36938,36939],{}," using the Weld CDI container -->\n",[53,36941,36942],{"class":55,"line":2459},[53,36943,36877],{},[53,36945,36946],{"class":55,"line":2470},[53,36947,36911],{},[53,36949,36950],{"class":55,"line":2476},[53,36951,36952],{}," \u003CartifactId>jbehave-weld\u003C/artifactId>\n",[53,36954,36955],{"class":55,"line":2484},[53,36956,36921],{},[53,36958,36959],{"class":55,"line":2490},[53,36960,36892],{},[53,36962,36963],{"class":55,"line":2495},[53,36964,36897],{},[53,36966,36967],{"class":55,"line":2507},[53,36968,6348],{},[18,36970,10847,36971,36974,36975,36978,36979,36982],{},[50,36972,36973],{},"jbehave-weld"," dependency allows to inject dependencies into our Steps classes (Steps and Stories are shown below).\nFinally, the ",[50,36976,36977],{},"jbehave-maven-plugin"," is included into the POM and configured to match and execute Stories classes at the\nintegration-test phase using the plugins ",[50,36980,36981],{},"run-stories-with-annotated-embedder"," goal:",[43,36984,36986],{"className":13786,"code":36985,"language":13788,"meta":48,"style":48},"...\n\u003Cplugin>\n \u003CgroupId>org.jbehave\u003C/groupId>\n \u003CartifactId>jbehave-maven-plugin\u003C/artifactId>\n \u003Cversion>3.6.8\u003C/version>\n \u003Cexecutions>\n \u003Cexecution>\n \u003Cphase>integration-test\u003C/phase>\n \u003Cconfiguration>\n \u003Cincludes>\n \u003Cinclude>**/*Stories.java\u003C/include>\n \u003C/includes>\n \u003Cscope>test\u003C/scope>\n \u003CtestSourceDirectory>src/integrationtest/java\u003C/testSourceDirectory>\n \u003C/configuration>\n \u003Cgoals>\n \u003Cgoal>run-stories-with-annotated-embedder\u003C/goal>\n \u003C/goals>\n \u003C/execution>\n \u003C/executions>\n \u003Cdependencies>\n \u003Cdependency>\n \u003CgroupId>org.jbehave\u003C/groupId>\n \u003CartifactId>jbehave-weld\u003C/artifactId>\n \u003Cversion>3.6.8\u003C/version>\n \u003C/dependency>\n \u003C/dependencies>\n\u003C/plugin>\n...\n",[50,36987,36988,36992,36996,37000,37005,37009,37013,37017,37022,37026,37031,37036,37041,37046,37051,37055,37059,37064,37068,37072,37076,37080,37084,37089,37094,37099,37103,37107,37111],{"__ignoreMap":48},[53,36989,36990],{"class":55,"line":56},[53,36991,6348],{},[53,36993,36994],{"class":55,"line":86},[53,36995,22511],{},[53,36997,36998],{"class":55,"line":126},[53,36999,36911],{},[53,37001,37002],{"class":55,"line":163},[53,37003,37004],{}," \u003CartifactId>jbehave-maven-plugin\u003C/artifactId>\n",[53,37006,37007],{"class":55,"line":186},[53,37008,36921],{},[53,37010,37011],{"class":55,"line":221},[53,37012,22551],{},[53,37014,37015],{"class":55,"line":242},[53,37016,22556],{},[53,37018,37019],{"class":55,"line":273},[53,37020,37021],{}," \u003Cphase>integration-test\u003C/phase>\n",[53,37023,37024],{"class":55,"line":279},[53,37025,30113],{},[53,37027,37028],{"class":55,"line":496},[53,37029,37030],{}," \u003Cincludes>\n",[53,37032,37033],{"class":55,"line":503},[53,37034,37035],{}," \u003Cinclude>**/*Stories.java\u003C/include>\n",[53,37037,37038],{"class":55,"line":509},[53,37039,37040],{}," \u003C/includes>\n",[53,37042,37043],{"class":55,"line":515},[53,37044,37045],{}," \u003Cscope>test\u003C/scope>\n",[53,37047,37048],{"class":55,"line":521},[53,37049,37050],{}," \u003CtestSourceDirectory>src/integrationtest/java\u003C/testSourceDirectory>\n",[53,37052,37053],{"class":55,"line":527},[53,37054,30138],{},[53,37056,37057],{"class":55,"line":533},[53,37058,22566],{},[53,37060,37061],{"class":55,"line":539},[53,37062,37063],{}," 
\u003Cgoal>run-stories-with-annotated-embedder\u003C/goal>\n",[53,37065,37066],{"class":55,"line":545},[53,37067,22576],{},[53,37069,37070],{"class":55,"line":2414},[53,37071,22581],{},[53,37073,37074],{"class":55,"line":2426},[53,37075,22586],{},[53,37077,37078],{"class":55,"line":2438},[53,37079,22833],{},[53,37081,37082],{"class":55,"line":2451},[53,37083,3811],{},[53,37085,37086],{"class":55,"line":2459},[53,37087,37088],{}," \u003CgroupId>org.jbehave\u003C/groupId>\n",[53,37090,37091],{"class":55,"line":2470},[53,37092,37093],{}," \u003CartifactId>jbehave-weld\u003C/artifactId>\n",[53,37095,37096],{"class":55,"line":2476},[53,37097,37098],{}," \u003Cversion>3.6.8\u003C/version>\n",[53,37100,37101],{"class":55,"line":2484},[53,37102,3831],{},[53,37104,37105],{"class":55,"line":2490},[53,37106,22882],{},[53,37108,37109],{"class":55,"line":2495},[53,37110,22591],{},[53,37112,37113],{"class":55,"line":2507},[53,37114,6348],{},[18,37116,37117,37118,4816,37121,37124,37125,37128,37129,4101],{},"Note that in my demo project, all jbehave related classes and resources are located in the ",[50,37119,37120],{},"src/integrationtest/java/",[50,37122,37123],{},"src/integrationtest/resources/"," directories respectively. In order for this to work, you also have to include the\n",[50,37126,37127],{},"build-helper-maven-plugin"," into the POM. In a real project it would probably be better to have a separate project for\nyour acceptance tests. Now, we will define some acceptance criteria stored in the file\n",[50,37130,37131],{},"src/integrationtest/resources/stories/dynip.stories",[43,37133,37135],{"className":13786,"code":37134,"language":13788,"meta":48,"style":48},"Scenario: get an ip address (host unspecified)\nGiven an initialized network service\nWhen no host is specified\nThen the result should be any valid ip address\nScenario: get an ip address for host\nGiven an initialized network service\nWhen the host is localhost\nThen the address should be 127.0.0.1\n",[50,37136,37137,37142,37147,37152,37157,37162,37166,37171],{"__ignoreMap":48},[53,37138,37139],{"class":55,"line":56},[53,37140,37141],{},"Scenario: get an ip address (host unspecified)\n",[53,37143,37144],{"class":55,"line":86},[53,37145,37146],{},"Given an initialized network service\n",[53,37148,37149],{"class":55,"line":126},[53,37150,37151],{},"When no host is specified\n",[53,37153,37154],{"class":55,"line":163},[53,37155,37156],{},"Then the result should be any valid ip address\n",[53,37158,37159],{"class":55,"line":186},[53,37160,37161],{},"Scenario: get an ip address for host\n",[53,37163,37164],{"class":55,"line":221},[53,37165,37146],{},[53,37167,37168],{"class":55,"line":242},[53,37169,37170],{},"When the host is localhost\n",[53,37172,37173],{"class":55,"line":273},[53,37174,37175],{},"Then the address should be 127.0.0.1\n",[18,37177,37178],{},"As you can see, the acceptance test is about verifying the behaviour of some network service. Here, the acceptance\ncriteria for two scenarios are written down in a given-when-then form. 
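For the build-helper-maven-plugin mentioned above, a typical configuration that registers the extra test source directory looks roughly like this; this is a sketch rather than the post's actual configuration, the execution id is invented and the plugin version is omitted:

```xml
<plugin>
    <groupId>org.codehaus.mojo</groupId>
    <artifactId>build-helper-maven-plugin</artifactId>
    <executions>
        <execution>
            <id>add-integrationtest-sources</id>
            <phase>generate-test-sources</phase>
            <goals>
                <goal>add-test-source</goal>
            </goals>
            <configuration>
                <sources>
                    <source>src/integrationtest/java</source>
                </sources>
            </configuration>
        </execution>
    </executions>
</plugin>
```

A second execution using the add-test-resource goal can register src/integrationtest/resources the same way.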
Next, we create an implementation of the\nscenario steps in a Steps class (which is just a POJO):",[43,37180,37182],{"className":288,"code":37181,"language":290,"meta":48,"style":48},"@WeldStep\n@Singleton\npublic class DynIpSteps {\nprivate static ValidatorFactory validatorFactory;\n static {\n validatorFactory = buildDefaultValidatorFactory();\n }\n @Inject\n private NetworkService networkService;\n private String host;\n @Given(\"an initialized network service\")\n public void anInitializedNetworkService() {\n }\n @When(\"no host is specified\")\n public void whenNoHostIsSpecified() {\n this.host = null;\n }\n @Then(\"the result should be any valid ip address\")\n public void thenTheResultShouldBeAnyValidIpAddress() {\n Validator validator = validatorFactory.getValidator();\n Set\u003CConstraintViolation\u003CIpv4Address>> violations =\n validator.validate(networkService.getIpv4Address());\n assertThat(violations.size(), is(0));\n }\n @When(\"the host is $host\")\n public void whenTheHostIs$Host(String host) {\n this.host = host;\n }\n @Then(\"the address should be $address\")\n public void thenTheAddressShouldBe$Address(String address) {\n Ipv4Address ip = networkService.getIpv4AddressForHost(host);\n assertThat(ip.getTextualRepresentation(), is(address));\n }\n}\n",[50,37183,37184,37189,37194,37199,37204,37209,37214,37218,37223,37228,37233,37238,37243,37247,37252,37257,37262,37266,37271,37276,37281,37286,37291,37296,37300,37305,37310,37315,37319,37324,37329,37334,37339,37343],{"__ignoreMap":48},[53,37185,37186],{"class":55,"line":56},[53,37187,37188],{},"@WeldStep\n",[53,37190,37191],{"class":55,"line":86},[53,37192,37193],{},"@Singleton\n",[53,37195,37196],{"class":55,"line":126},[53,37197,37198],{},"public class DynIpSteps {\n",[53,37200,37201],{"class":55,"line":163},[53,37202,37203],{},"private static ValidatorFactory validatorFactory;\n",[53,37205,37206],{"class":55,"line":186},[53,37207,37208],{}," static {\n",[53,37210,37211],{"class":55,"line":221},[53,37212,37213],{}," validatorFactory = buildDefaultValidatorFactory();\n",[53,37215,37216],{"class":55,"line":242},[53,37217,7384],{},[53,37219,37220],{"class":55,"line":273},[53,37221,37222],{}," @Inject\n",[53,37224,37225],{"class":55,"line":279},[53,37226,37227],{}," private NetworkService networkService;\n",[53,37229,37230],{"class":55,"line":496},[53,37231,37232],{}," private String host;\n",[53,37234,37235],{"class":55,"line":503},[53,37236,37237],{}," @Given(\"an initialized network service\")\n",[53,37239,37240],{"class":55,"line":509},[53,37241,37242],{}," public void anInitializedNetworkService() {\n",[53,37244,37245],{"class":55,"line":515},[53,37246,7384],{},[53,37248,37249],{"class":55,"line":521},[53,37250,37251],{}," @When(\"no host is specified\")\n",[53,37253,37254],{"class":55,"line":527},[53,37255,37256],{}," public void whenNoHostIsSpecified() {\n",[53,37258,37259],{"class":55,"line":533},[53,37260,37261],{}," this.host = null;\n",[53,37263,37264],{"class":55,"line":539},[53,37265,7384],{},[53,37267,37268],{"class":55,"line":545},[53,37269,37270],{}," @Then(\"the result should be any valid ip address\")\n",[53,37272,37273],{"class":55,"line":2414},[53,37274,37275],{}," public void thenTheResultShouldBeAnyValidIpAddress() {\n",[53,37277,37278],{"class":55,"line":2426},[53,37279,37280],{}," Validator validator = validatorFactory.getValidator();\n",[53,37282,37283],{"class":55,"line":2438},[53,37284,37285],{}," Set\u003CConstraintViolation\u003CIpv4Address>> violations 
=\n",[53,37287,37288],{"class":55,"line":2451},[53,37289,37290],{}," validator.validate(networkService.getIpv4Address());\n",[53,37292,37293],{"class":55,"line":2459},[53,37294,37295],{}," assertThat(violations.size(), is(0));\n",[53,37297,37298],{"class":55,"line":2470},[53,37299,7384],{},[53,37301,37302],{"class":55,"line":2476},[53,37303,37304],{}," @When(\"the host is $host\")\n",[53,37306,37307],{"class":55,"line":2484},[53,37308,37309],{}," public void whenTheHostIs$Host(String host) {\n",[53,37311,37312],{"class":55,"line":2490},[53,37313,37314],{}," this.host = host;\n",[53,37316,37317],{"class":55,"line":2495},[53,37318,7384],{},[53,37320,37321],{"class":55,"line":2507},[53,37322,37323],{}," @Then(\"the address should be $address\")\n",[53,37325,37326],{"class":55,"line":2528},[53,37327,37328],{}," public void thenTheAddressShouldBe$Address(String address) {\n",[53,37330,37331],{"class":55,"line":2539},[53,37332,37333],{}," Ipv4Address ip = networkService.getIpv4AddressForHost(host);\n",[53,37335,37336],{"class":55,"line":2551},[53,37337,37338],{}," assertThat(ip.getTextualRepresentation(), is(address));\n",[53,37340,37341],{"class":55,"line":2562},[53,37342,7384],{},[53,37344,37345],{"class":55,"line":2573},[53,37346,282],{},[18,37348,37349],{},"While the class level annotations are related to dependency injection, the method level annotations represent the\nvarious steps defined in the stories scenarios.",[18,37351,37352],{},"Along with the Steps class we also implement a Stories class which knows about both, the story files and the Steps\nclasses. In order to reduce boilerplate code, an abstract base class is extended by each Stories class:",[43,37354,37356],{"className":288,"code":37355,"language":290,"meta":48,"style":48},"@UsingSteps(instances = {DynIpSteps.class})\npublic class DynIpStories extends StoriesBase {\n public DynIpStories() {\n super(\"**/*.stories\");\n }\n}\n@RunWith(WeldAnnotatedEmbedderRunner.class)\n@Configure\n@UsingWeld\n@UsingEmbedder(embedder = Embedder.class,\n generateViewAfterStories = true,\n ignoreFailureInStories = true,\n ignoreFailureInView = true)\npublic abstract class StoriesBase extends InjectableEmbedder {\n private final String storiesPath;\n private final URL codeLocation;\n protected StoriesBase(String storiesPath) {\n this.storiesPath = storiesPath;\n this.codeLocation = codeLocationFromClass(this.getClass());\n }\n @Test\n @Override\n public void run() throws Throwable {\n StoryFinder storyFinder = new StoryFinder();\n injectedEmbedder().runStoriesAsPaths(\n storyFinder.findPaths(codeLocation, storiesPath, \"\"));\n }\n}\n",[50,37357,37358,37363,37368,37373,37378,37382,37386,37391,37396,37401,37406,37411,37416,37421,37426,37431,37436,37441,37446,37451,37455,37460,37465,37470,37475,37480,37485,37489],{"__ignoreMap":48},[53,37359,37360],{"class":55,"line":56},[53,37361,37362],{},"@UsingSteps(instances = {DynIpSteps.class})\n",[53,37364,37365],{"class":55,"line":86},[53,37366,37367],{},"public class DynIpStories extends StoriesBase {\n",[53,37369,37370],{"class":55,"line":126},[53,37371,37372],{}," public DynIpStories() {\n",[53,37374,37375],{"class":55,"line":163},[53,37376,37377],{}," 
super(\"**/*.stories\");\n",[53,37379,37380],{"class":55,"line":186},[53,37381,7384],{},[53,37383,37384],{"class":55,"line":221},[53,37385,282],{},[53,37387,37388],{"class":55,"line":242},[53,37389,37390],{},"@RunWith(WeldAnnotatedEmbedderRunner.class)\n",[53,37392,37393],{"class":55,"line":273},[53,37394,37395],{},"@Configure\n",[53,37397,37398],{"class":55,"line":279},[53,37399,37400],{},"@UsingWeld\n",[53,37402,37403],{"class":55,"line":496},[53,37404,37405],{},"@UsingEmbedder(embedder = Embedder.class,\n",[53,37407,37408],{"class":55,"line":503},[53,37409,37410],{}," generateViewAfterStories = true,\n",[53,37412,37413],{"class":55,"line":509},[53,37414,37415],{}," ignoreFailureInStories = true,\n",[53,37417,37418],{"class":55,"line":515},[53,37419,37420],{}," ignoreFailureInView = true)\n",[53,37422,37423],{"class":55,"line":521},[53,37424,37425],{},"public abstract class StoriesBase extends InjectableEmbedder {\n",[53,37427,37428],{"class":55,"line":527},[53,37429,37430],{}," private final String storiesPath;\n",[53,37432,37433],{"class":55,"line":533},[53,37434,37435],{}," private final URL codeLocation;\n",[53,37437,37438],{"class":55,"line":539},[53,37439,37440],{}," protected StoriesBase(String storiesPath) {\n",[53,37442,37443],{"class":55,"line":545},[53,37444,37445],{}," this.storiesPath = storiesPath;\n",[53,37447,37448],{"class":55,"line":2414},[53,37449,37450],{}," this.codeLocation = codeLocationFromClass(this.getClass());\n",[53,37452,37453],{"class":55,"line":2426},[53,37454,7384],{},[53,37456,37457],{"class":55,"line":2438},[53,37458,37459],{}," @Test\n",[53,37461,37462],{"class":55,"line":2451},[53,37463,37464],{}," @Override\n",[53,37466,37467],{"class":55,"line":2459},[53,37468,37469],{}," public void run() throws Throwable {\n",[53,37471,37472],{"class":55,"line":2470},[53,37473,37474],{}," StoryFinder storyFinder = new StoryFinder();\n",[53,37476,37477],{"class":55,"line":2476},[53,37478,37479],{}," injectedEmbedder().runStoriesAsPaths(\n",[53,37481,37482],{"class":55,"line":2484},[53,37483,37484],{}," storyFinder.findPaths(codeLocation, storiesPath, \"\"));\n",[53,37486,37487],{"class":55,"line":2490},[53,37488,7384],{},[53,37490,37491],{"class":55,"line":2495},[53,37492,282],{},[18,37494,37495,37496,37498,37499,37502],{},"Since we are using jbehave annotations it’s important to set the goal element to ",[50,37497,36981],{},"\nin the ",[50,37500,37501],{},"jbehave-maven-plugins"," configuration (as shown above). 
Now, there are only a few configuration related classes\nto be implemented: one class that produces the configuration information and two other classes that relate to story\nloading and report building:",[43,37504,37506],{"className":288,"code":37505,"language":290,"meta":48,"style":48},"@ApplicationScoped\npublic class ConfigurationProducer {\n@Produces @WeldConfiguration\npublic Configuration getConfiguration() {\n return new MostUsefulConfiguration()\n .useStoryLoader(new StoryLoader())\n .useStoryReporterBuilder(new ReportBuilder());\n }\n}\npublic class StoryLoader extends LoadFromClasspath {\n public StoryLoader() {\n super(StoryLoader.class.getClassLoader());\n }\n}\npublic class ReportBuilder extends StoryReporterBuilder {\n public ReportBuilder() {\n withFormats(CONSOLE, TXT, HTML).withDefaultFormats();\n }\n}\n",[50,37507,37508,37513,37518,37523,37528,37533,37538,37543,37547,37551,37556,37561,37566,37570,37574,37579,37584,37589,37593],{"__ignoreMap":48},[53,37509,37510],{"class":55,"line":56},[53,37511,37512],{},"@ApplicationScoped\n",[53,37514,37515],{"class":55,"line":86},[53,37516,37517],{},"public class ConfigurationProducer {\n",[53,37519,37520],{"class":55,"line":126},[53,37521,37522],{},"@Produces @WeldConfiguration\n",[53,37524,37525],{"class":55,"line":163},[53,37526,37527],{},"public Configuration getConfiguration() {\n",[53,37529,37530],{"class":55,"line":186},[53,37531,37532],{}," return new MostUsefulConfiguration()\n",[53,37534,37535],{"class":55,"line":221},[53,37536,37537],{}," .useStoryLoader(new StoryLoader())\n",[53,37539,37540],{"class":55,"line":242},[53,37541,37542],{}," .useStoryReporterBuilder(new ReportBuilder());\n",[53,37544,37545],{"class":55,"line":273},[53,37546,7384],{},[53,37548,37549],{"class":55,"line":279},[53,37550,282],{},[53,37552,37553],{"class":55,"line":496},[53,37554,37555],{},"public class StoryLoader extends LoadFromClasspath {\n",[53,37557,37558],{"class":55,"line":503},[53,37559,37560],{}," public StoryLoader() {\n",[53,37562,37563],{"class":55,"line":509},[53,37564,37565],{}," super(StoryLoader.class.getClassLoader());\n",[53,37567,37568],{"class":55,"line":515},[53,37569,7384],{},[53,37571,37572],{"class":55,"line":521},[53,37573,282],{},[53,37575,37576],{"class":55,"line":527},[53,37577,37578],{},"public class ReportBuilder extends StoryReporterBuilder {\n",[53,37580,37581],{"class":55,"line":533},[53,37582,37583],{}," public ReportBuilder() {\n",[53,37585,37586],{"class":55,"line":539},[53,37587,37588],{}," withFormats(CONSOLE, TXT, HTML).withDefaultFormats();\n",[53,37590,37591],{"class":55,"line":545},[53,37592,7384],{},[53,37594,37595],{"class":55,"line":2414},[53,37596,282],{},[18,37598,37599,37600,37603,37604,99,37607,11792,37610,37613,37614,37617,37618,37621,37622],{},"The Steps class must be annotated with annotation ",[50,37601,37602],{},"javax.inject.Singleton"," (this is because in my demo project I’m using\nthe Weld CDI container which does not default to create singleton scoped beans; having no singleton prevents us from\nholding any state information across multiple calls of the ",[50,37605,37606],{},"@Given",[50,37608,37609],{},"@When",[50,37611,37612],{},"@Then"," annotated methods). 
For the\ndependency injection mechanism to work, there must also be (an empty) ",[50,37615,37616],{},"beans.xml"," file in both, directory\n",[50,37619,37620],{},"src/integrationtest/resources/META-INF/"," and directory ",[50,37623,37624],{},"src/main/resources/META-INF/",[607,37626,989],{},{"title":48,"searchDepth":86,"depth":86,"links":37628},[],[613],"2012-08-26T17:53:49","Producing high quality software in an agile process means that everybody involved in the delivery team (or in other\\nwords: the team as a whole) do their best to ensure that each products increment delivered to the customer meets the\\nbusiness values of each story that has been implemented.","https://synyx.de/blog/implementing-acceptance-tests-with-jbehave/",{},"/blog/implementing-acceptance-tests-with-jbehave",{"title":36804,"description":36813},"blog/implementing-acceptance-tests-with-jbehave",[12448,36825],"Producing high quality software in an agile process means that everybody involved in the delivery team (or in other words: the team as a whole) do their best to ensure…","48wd0yTGmAyY4Dloi3EOh2Qx9HSxfb5R_I-JR7FeADE",{"id":37641,"title":37642,"author":37643,"body":37644,"category":37717,"date":37718,"description":37651,"extension":617,"link":37719,"meta":37720,"navigation":499,"path":37721,"seo":37722,"slug":37723,"stem":37724,"tags":37725,"teaser":37726,"__hash__":37727},"blog/blog/clean-code-development-prinzipien-und-praktiken-zur-steigerung-der-software-qualitat.md","Clean Code Development – Prinzipien und Praktiken zur Steigerung der Software Qualität",[9608],{"type":11,"value":37645,"toc":37715},[37646,37649,37652,37655,37658,37663,37666,37671,37674,37677,37682,37685,37688,37693,37696,37699,37702,37705,37708],[14,37647,37642],{"id":37648},"clean-code-development-prinzipien-und-praktiken-zur-steigerung-der-software-qualität",[18,37650,37651],{},"Software Qualität ist ein Trendthema aber wie erreicht man eine hohe",[18,37653,37654],{},"Qualität? Reichen gängige Instrumente wie UnitTests und Code-Reviews aus?",[18,37656,37657],{},"Clean Code Development bietet keine fertigen Problemlösungen sondern fasst Prinzipien und Praktiken zusammen und sieht\neine stufenweise Einführung derer vor. Ohne auf die einzelnen Grade, welche den Weg zum Clean Code Developer\nbeschreiben, einzugehen möchte ich hier ein paar ausgewählte Prinzipien kurz vorstellen.",[18,37659,37660],{},[27,37661,37662],{},"Don’t Repeat Yourself (DRY)",[18,37664,37665],{},"Code Duplizierung sollte vermieden werden, da Anpassungen an mehreren stellen erfolgen müssen um Konsistenz zu\ngewährleisten. Eigentlich ein alter Hut aber Copy & Paste ist in der Praxis weit verbreitet.",[18,37667,37668],{},[27,37669,37670],{},"Keep it simple, stupid (KISS)",[18,37672,37673],{},"Mach es nicht komplizierter als notwendig.",[18,37675,37676],{},"Ja, auch wenn es dir uncool und langweilig erscheint!",[18,37678,37679],{},[27,37680,37681],{},"Favour Composition over Inheritance (FCoI)",[18,37683,37684],{},"Bei der Vererbung entstehen Abhängikeiten von den Subklassen zu den Elternklassen, was das Austauschen der\nFunktionalität zur Laufzeit erschwert. 
Wird die Vererbung inflationär verwendet, kann dies ein System schnell unnötig\nkomplex und schlecht testbar werden lassen.",[18,37686,37687],{},"Bei der Komposition verwendet eine Klasse eine andere, was die Abhängigkeiten reduziert und die Testbarkeit wesentlich\nvereinfacht.",[18,37689,37690],{},[27,37691,37692],{},"You Ain’t Gonna Need It (YAGNI)",[18,37694,37695],{},"Dieses Prinzip bedeutet soviel wie “implementiere nichts was du nicht brauchst”. Das klingt logisch aber dennoch hab ich\nselbst schon gegen dieses Prinzip verstoßen.",[18,37697,37698],{},"In der Praxis wird versucht Software auf künftige Anforderungen vorzubereiten aber oft stellt sich später heraus, dass\nsich die ursprünglich erwarteten Anforderungen tatsächlich nie ergeben haben.",[18,37700,37701],{},"Statt dessen ergeben sich natürlich ganz andere Anforderungen die nicht berücksichtigt wurden und deren Umsetzung jetzt\ndurch unnötigen Balast behindert werden.",[18,37703,37704],{},"Die Praktiken des Clean Code Developments sind Methoden und Techniken die ständig eingesetzt werden sollten und umfassen\nzum Beispiel das Issue Tracking, automatisierte Integrationstests, Continuous Integration, statische Codeanalysen aber\nauch das Weitergeben von Erfahrungen und die Teilnahme an Fachveranstaltungen.",[18,37706,37707],{},"Mehr zu Clean Code Development gibt es unter",[18,37709,37710],{},[585,37711,37712],{"href":37712,"rel":37713,"title":37714},"http://www.clean-code-developer.de",[589],"clean-code-developer",{"title":48,"searchDepth":86,"depth":86,"links":37716},[],[613],"2012-08-16T09:57:29","https://synyx.de/blog/clean-code-development-prinzipien-und-praktiken-zur-steigerung-der-software-qualitat/",{},"/blog/clean-code-development-prinzipien-und-praktiken-zur-steigerung-der-software-qualitat",{"title":37642,"description":37651},"clean-code-development-prinzipien-und-praktiken-zur-steigerung-der-software-qualitat","blog/clean-code-development-prinzipien-und-praktiken-zur-steigerung-der-software-qualitat",[7611,13172],"Software Qualität ist ein Trendthema aber wie erreicht man eine hohe Qualität? Reichen gängige Instrumente wie UnitTests und Code-Reviews aus? Clean Code Development bietet keine fertigen Problemlösungen sondern fasst Prinzipien…","qanNmRX3aNPeWTNpe7MMGWO8r9OdqTQQpJRo9UX9hwo",{"id":37729,"title":35741,"author":37730,"body":37731,"category":38353,"date":38354,"description":38355,"extension":617,"link":38356,"meta":38357,"navigation":499,"path":38358,"seo":38359,"slug":37735,"stem":38360,"tags":38361,"teaser":38362,"__hash__":38363},"blog/blog/visualize-javascript-code-quality-and-code-coverage-with-sonar.md",[6892],{"type":11,"value":37732,"toc":38344},[37733,37736,37739,37745,37748,37754,37767,37770,37774,37822,37825,37838,37841,37855,37866,37869,37954,37956,37959,37961,37964,37966,37969,37971,37981,37983,37986,37989,38032,38035,38067,38070,38158,38167,38170,38173,38176,38181,38185,38192,38196,38203,38226,38230,38233,38242,38245,38254,38265,38274,38278,38281,38284,38292,38302,38306,38311,38317,38322,38327,38342],[14,37734,35741],{"id":37735},"visualize-javascript-code-quality-and-code-coverage-with-sonar",[18,37737,37738],{},"It is hard to imagine a web project without JavaScript code today. JavaScript is an easy to learn and very performant\nscript language. In the past we have used JavaScript mostly for eye-candy and form validation. Recently we have been\nasked more often to implement complex user interfaces with trees, sortable tables and things like that. 
So we decided to\nrely more on JavaScript to improve the feedback of the website to user actions.",[18,37740,37741,37742],{},"The first question that came up was: ",[27,37743,37744],{},"How to develop test driven with JavaScript?",[18,37746,37747],{},"We decided to use Jasmine, a behaviour driven development framework which tests can be run headless in a Maven build for\nexample.",[18,37749,37750,37751],{},"The second question was: ",[27,37752,37753],{},"How to visualise code coverage and code quality?",[18,37755,37756,37757,37762,37763,37766],{},"The tool Sonar has been proven to be useful in our Java projects in the past. So the first I was searching for was the\nJavaScript Plugin for Sonar. It can be\ndownloaded ",[585,37758,10819],{"href":37759,"rel":37760,"title":37761},"https://web.archive.org/web/20150530172810/http://docs.codehaus.org/display/SONAR/JavaScript+Plugin",[589],"Sonar JavaScript Plugin","\nand was luckily updated to version 1.0 recently with a bunch of new metrics like “",[573,37764,37765],{},"avoid usage of == and !=","”.",[18,37768,37769],{},"Unfortunately this plugin only supports JsTestDriver for code coverage and other test metrics. However, Jasmine support\nis on the Roadmap and I’m looking forward to see the next release of the JavaScript Plugin. But at the moment I had to\nintegrate our Jasmine and jasmine-jquery tests with JsTestDriver, the JavaScript Plugin of Sonar and an automated maven\nbuild.",[2207,37771,37773],{"id":37772},"used-technologies","Used technologies",[577,37775,37776,37783,37790,37797,37804,37811,37817],{},[580,37777,37778],{},[585,37779,37782],{"href":37780,"rel":37781,"title":37782},"https://maven.apache.org/",[589],"Maven",[580,37784,37785],{},[585,37786,37789],{"href":37787,"rel":37788,"title":37789},"https://github.com/jasmine/jasmine",[589],"Jasmine",[580,37791,37792],{},[585,37793,37796],{"href":37794,"rel":37795,"title":37796},"https://github.com/ibolmo/jasmine-jstd-adapter",[589],"jasmine-jstd-adapter",[580,37798,37799],{},[585,37800,37803],{"href":37801,"rel":37802,"title":37803},"https://code.google.com/p/js-test-driver/",[589],"JsTestDriver",[580,37805,37806],{},[585,37807,37810],{"href":37808,"rel":37809,"title":37810},"https://code.google.com/p/jstd-maven-plugin/",[589],"jstd-maven-plugin",[580,37812,37813],{},[585,37814,27693],{"href":37815,"rel":37816,"title":27693},"https://sonarsource.com/",[589],[580,37818,37819],{},[585,37820,37761],{"href":37759,"rel":37821,"title":37761},[589],[2207,37823,37803],{"id":37824},"jstestdriver",[18,37826,37827,37828,37832,37833,986],{},"JsTestDriver is a test runner designed by Google and can be\ndownloaded ",[585,37829,10819],{"href":37830,"rel":37831,"title":37803},"http://code.google.com/p/js-test-driver/downloads/",[589],". If you don’t know JSTestDriver\nyet, you maybe want to run over\nit’s ",[585,37834,10174],{"href":37835,"rel":37836,"title":37837},"http://code.google.com/p/js-test-driver/w/list",[589],"JsTestDriver Documentation",[18,37839,37840],{},"Some advantages of JsTestDriver:",[577,37842,37843,37846,37849,37852],{},[580,37844,37845],{},"Eclipse and IntelliJ integration",[580,37847,37848],{},"Maven Plugin",[580,37850,37851],{},"parallel test executions across browsers (local or remote)",[580,37853,37854],{},"code coverage",[18,37856,37857,37858,37861,37862,37865],{},"In order to run JsTestDriver you have to create a config file. By default you have to name it ",[573,37859,37860],{},"jsTestDriver.conf"," and\nyou have to save it in ",[573,37863,37864],{},"src/test/resources",". 
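The Jasmine specs themselves are never shown in this post, so here is a minimal example of what one of the files matched by the src/test/js/*Test.js pattern in the JsTestDriver configuration below might look like; the file name and the add() function are purely hypothetical:

```javascript
// src/test/js/calculatorTest.js - hypothetical spec; only the *Test.js naming
// and the plain Jasmine describe/it/expect API are assumed here.
describe("calculator", function () {
    it("adds two numbers", function () {
        // add() would be defined in one of the files loaded from src/main/js/
        expect(add(2, 3)).toBe(5);
    });
});
```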
If you want to choose another filename or path remember to define these in\nthe Maven plugin later.",[18,37867,37868],{},"In the config file you have to define following flags (Be sure that there are no whitespaces in front of the keywords):",[43,37870,37872],{"className":13786,"code":37871,"language":13788,"meta":48,"style":48},"server: http://localhost:9876\nload:\n # jasmine dependency\n - lib/jasmine.js\n # to make our jasmine tests work within jstd\n # (must be included after jasmine.js)\n - lib/jasmineAdapter.js\n # models, views and other stuff\n - src/main/js/*.js\ntest:\n # load all test files\n - src/test/js/*Test.js\nplugin:\n - name: \"coverage\"\n jar: \"lib/coverage-1.3.4.b.jar\"\n module: \"com.google.jstestdriver.coverage.CoverageModule\"\n",[50,37873,37874,37879,37884,37889,37894,37899,37904,37909,37914,37919,37924,37929,37934,37939,37944,37949],{"__ignoreMap":48},[53,37875,37876],{"class":55,"line":56},[53,37877,37878],{},"server: http://localhost:9876\n",[53,37880,37881],{"class":55,"line":86},[53,37882,37883],{},"load:\n",[53,37885,37886],{"class":55,"line":126},[53,37887,37888],{}," # jasmine dependency\n",[53,37890,37891],{"class":55,"line":163},[53,37892,37893],{}," - lib/jasmine.js\n",[53,37895,37896],{"class":55,"line":186},[53,37897,37898],{}," # to make our jasmine tests work within jstd\n",[53,37900,37901],{"class":55,"line":221},[53,37902,37903],{}," # (must be included after jasmine.js)\n",[53,37905,37906],{"class":55,"line":242},[53,37907,37908],{}," - lib/jasmineAdapter.js\n",[53,37910,37911],{"class":55,"line":273},[53,37912,37913],{}," # models, views and other stuff\n",[53,37915,37916],{"class":55,"line":279},[53,37917,37918],{}," - src/main/js/*.js\n",[53,37920,37921],{"class":55,"line":496},[53,37922,37923],{},"test:\n",[53,37925,37926],{"class":55,"line":503},[53,37927,37928],{}," # load all test files\n",[53,37930,37931],{"class":55,"line":509},[53,37932,37933],{}," - src/test/js/*Test.js\n",[53,37935,37936],{"class":55,"line":515},[53,37937,37938],{},"plugin:\n",[53,37940,37941],{"class":55,"line":521},[53,37942,37943],{}," - name: \"coverage\"\n",[53,37945,37946],{"class":55,"line":527},[53,37947,37948],{}," jar: \"lib/coverage-1.3.4.b.jar\"\n",[53,37950,37951],{"class":55,"line":533},[53,37952,37953],{}," module: \"com.google.jstestdriver.coverage.CoverageModule\"\n",[1217,37955,19428],{"id":19428},[18,37957,37958],{},"JsTestDriver starts it’s own server and generates a HTML page that can be captured by various browsers.",[1217,37960,17278],{"id":17278},[18,37962,37963],{},"Define all needed JavaScript files like your models and views and so on. You can use a wildcard * to include all files\nwithin the specified directory. Note that it could be relevant to load your files in an correct order since a normal\nHTML page will be created and some dependencies have to be considered. The load sequence will be alphabetically if the\nwildcard is used.",[1217,37965,21474],{"id":21474},[18,37967,37968],{},"simply attaches the test files to the created HTML page.",[1217,37970,4044],{"id":4044},[18,37972,37973,37974,37980],{},"to be able to see the code coverage report in Sonar you have to download\nthe ",[585,37975,37979],{"href":37976,"rel":37977,"title":37978},"http://code.google.com/p/js-test-driver/downloads/list",[589],"jstd coverage plugin","coverage plugin"," and save it\nsomewhere as you wish. 
Just remember to add the path to the plugin jar flag as shown in the listing above.",[2207,37982,37810],{"id":37810},[18,37984,37985],{},"Before we can see the Sonar report about the code quality and code coverage we have to configure maven to run jstd.",[18,37987,37988],{},"Unfortunately, the jstd-maven-plugin is not available at the Maven Central Repository. Therefore we have to add a new\nrepository and pluginRepository to our pom.xml:",[43,37990,37992],{"className":13786,"code":37991,"language":13788,"meta":48,"style":48},"\u003Crepository>\n \u003Cid>jstd-maven-plugin google code repo\u003C/id>\n \u003Curl>http://jstd-maven-plugin.googlecode.com/svn/maven2\u003C/url>\n\u003C/repository>\n\u003CpluginRepository>\n \u003Cid>jstd-maven-plugin google code repo\u003C/id>\n \u003Curl>http://jstd-maven-plugin.googlecode.com/svn/maven2\u003C/url>\n\u003C/pluginRepository>\n",[50,37993,37994,37999,38004,38009,38014,38019,38023,38027],{"__ignoreMap":48},[53,37995,37996],{"class":55,"line":56},[53,37997,37998],{},"\u003Crepository>\n",[53,38000,38001],{"class":55,"line":86},[53,38002,38003],{}," \u003Cid>jstd-maven-plugin google code repo\u003C/id>\n",[53,38005,38006],{"class":55,"line":126},[53,38007,38008],{}," \u003Curl>http://jstd-maven-plugin.googlecode.com/svn/maven2\u003C/url>\n",[53,38010,38011],{"class":55,"line":163},[53,38012,38013],{},"\u003C/repository>\n",[53,38015,38016],{"class":55,"line":186},[53,38017,38018],{},"\u003CpluginRepository>\n",[53,38020,38021],{"class":55,"line":221},[53,38022,38003],{},[53,38024,38025],{"class":55,"line":242},[53,38026,38008],{},[53,38028,38029],{"class":55,"line":273},[53,38030,38031],{},"\u003C/pluginRepository>\n",[18,38033,38034],{},"After that maven should be able to fetch the jstd-maven-plugin artifact:",[43,38036,38038],{"className":13786,"code":38037,"language":13788,"meta":48,"style":48},"\u003Cdependency>\n \u003CgroupId>com.googlecode.jstd-maven-plugin\u003C/groupId>\n \u003CartifactId>jstd-maven-plugin\u003C/artifactId>\n \u003Cversion>1.3.2.5\u003C/version>\n \u003Cscope>test\u003C/scope>\n\u003C/dependency>\n",[50,38039,38040,38044,38049,38054,38059,38063],{"__ignoreMap":48},[53,38041,38042],{"class":55,"line":56},[53,38043,36877],{},[53,38045,38046],{"class":55,"line":86},[53,38047,38048],{}," \u003CgroupId>com.googlecode.jstd-maven-plugin\u003C/groupId>\n",[53,38050,38051],{"class":55,"line":126},[53,38052,38053],{}," \u003CartifactId>jstd-maven-plugin\u003C/artifactId>\n",[53,38055,38056],{"class":55,"line":163},[53,38057,38058],{}," \u003Cversion>1.3.2.5\u003C/version>\n",[53,38060,38061],{"class":55,"line":186},[53,38062,35885],{},[53,38064,38065],{"class":55,"line":221},[53,38066,36897],{},[18,38068,38069],{},"To run our tests with a maven build we need the jstd-maven-plugin as a Maven plugin:",[43,38071,38073],{"className":13786,"code":38072,"language":13788,"meta":48,"style":48},"\u003Cplugin>\n \u003CgroupId>com.googlecode.jstd-maven-plugin\u003C/groupId>\n \u003CartifactId>jstd-maven-plugin\u003C/artifactId>\n \u003Cversion>1.3.2.5\u003C/version>\n \u003Cconfiguration>\n \u003Cbrowser>firefox\u003C/browser>\n \u003Cport>9876\u003C/port>\n \u003CtestOutput>target/jstestdriver\u003C/testOutput>\n \u003C/configuration>\n \u003Cexecutions>\n \u003Cexecution>\n \u003Cid>run-tests\u003C/id>\n \u003Cgoals>\n \u003Cgoal>test\u003C/goal>\n \u003C/goals>\n \u003C/execution>\n 
\u003C/executions>\n\u003C/plugin>\n",[50,38074,38075,38079,38083,38087,38091,38096,38101,38106,38111,38116,38121,38125,38130,38135,38140,38145,38149,38154],{"__ignoreMap":48},[53,38076,38077],{"class":55,"line":56},[53,38078,22511],{},[53,38080,38081],{"class":55,"line":86},[53,38082,38048],{},[53,38084,38085],{"class":55,"line":126},[53,38086,38053],{},[53,38088,38089],{"class":55,"line":163},[53,38090,38058],{},[53,38092,38093],{"class":55,"line":186},[53,38094,38095],{}," \u003Cconfiguration>\n",[53,38097,38098],{"class":55,"line":221},[53,38099,38100],{}," \u003Cbrowser>firefox\u003C/browser>\n",[53,38102,38103],{"class":55,"line":242},[53,38104,38105],{}," \u003Cport>9876\u003C/port>\n",[53,38107,38108],{"class":55,"line":273},[53,38109,38110],{}," \u003CtestOutput>target/jstestdriver\u003C/testOutput>\n",[53,38112,38113],{"class":55,"line":279},[53,38114,38115],{}," \u003C/configuration>\n",[53,38117,38118],{"class":55,"line":496},[53,38119,38120],{}," \u003Cexecutions>\n",[53,38122,38123],{"class":55,"line":503},[53,38124,30181],{},[53,38126,38127],{"class":55,"line":509},[53,38128,38129],{}," \u003Cid>run-tests\u003C/id>\n",[53,38131,38132],{"class":55,"line":515},[53,38133,38134],{}," \u003Cgoals>\n",[53,38136,38137],{"class":55,"line":521},[53,38138,38139],{}," \u003Cgoal>test\u003C/goal>\n",[53,38141,38142],{"class":55,"line":527},[53,38143,38144],{}," \u003C/goals>\n",[53,38146,38147],{"class":55,"line":533},[53,38148,30206],{},[53,38150,38151],{"class":55,"line":539},[53,38152,38153],{}," \u003C/executions>\n",[53,38155,38156],{"class":55,"line":545},[53,38157,22591],{},[18,38159,38160,38161,38166],{},"Three configuration flags are mandatory. More command line flags can be found in\nthe ",[585,38162,10174],{"href":38163,"rel":38164,"title":38165},"http://code.google.com/p/js-test-driver/wiki/CommandLineFlags",[589],"jstd-maven-plugin documentation"," of\njstd.",[1217,38168,38169],{"id":38169},"browser",[18,38171,38172],{},"a comma separated list of browsers (more exactly the path to the specific browser) that should be used for the tests",[1217,38174,38175],{"id":38175},"port",[18,38177,38178,38179],{},"the port that is set in ",[573,38180,37860],{},[1217,38182,38184],{"id":38183},"testoutput","testOutput",[18,38186,38187,38188,38191],{},"This specifies the directory where the code coverage reports (needed for Sonar) will be saved. The default directory for\nsonar is ",[573,38189,38190],{},"target/jstestdriver",", so remember to configure sonar accordingly, if you choose another directory.",[2207,38193,38195],{"id":38194},"set-the-sourcedirectory-in-the-pomxml","Set the sourceDirectory in the pom.xml",[18,38197,38198,38199,38202],{},"In order for Sonar to be able to analyze the JavaScript code and to visualize the reports, we must add the path to the\nsource code which is ",[573,38200,38201],{},"src/main/js"," in our case.",[43,38204,38206],{"className":13786,"code":38205,"language":13788,"meta":48,"style":48},"\u003Cbuild>\n \u003CsourceDirectory>src/main/js\u003C/sourceDirectory>\n \u003C!-- ... -->\n\u003C/build>\n",[50,38207,38208,38212,38217,38222],{"__ignoreMap":48},[53,38209,38210],{"class":55,"line":56},[53,38211,30083],{},[53,38213,38214],{"class":55,"line":86},[53,38215,38216],{}," \u003CsourceDirectory>src/main/js\u003C/sourceDirectory>\n",[53,38218,38219],{"class":55,"line":126},[53,38220,38221],{}," \u003C!-- ... 
-->\n",[53,38223,38224],{"class":55,"line":163},[53,38225,30256],{},[2207,38227,38229],{"id":38228},"run-the-tests-and-the-analysis","Run the tests and the analysis",[18,38231,38232],{},"Everything should be configured correctly now. So just start the maven build:",[43,38234,38236],{"className":13786,"code":38235,"language":13788,"meta":48,"style":48},"mvn jstd:test\n",[50,38237,38238],{"__ignoreMap":48},[53,38239,38240],{"class":55,"line":56},[53,38241,38235],{},[18,38243,38244],{},"JsTestDriver opens the defined browsers, runs all tests and generates the code coverage report. After that we have to\nstart the sonar build:",[43,38246,38248],{"className":13786,"code":38247,"language":13788,"meta":48,"style":48},"mvn sonar:sonar -Dsonar.language=js -Dsonar.branch=js\n",[50,38249,38250],{"__ignoreMap":48},[53,38251,38252],{"class":55,"line":56},[53,38253,38247],{},[18,38255,38256,38257,38260,38261,38264],{},"To tell sonar to analyze a JavaScript project the ",[573,38258,38259],{},"sonar.language"," property is essential. If the same project should be\nanalyzed as a Java project you may want to add a branch with the property ",[573,38262,38263],{},"sonar.branch",". Otherwise the previous values\nwill be overridden with this JavaScript analysis.",[18,38266,38267,8713,38271],{},[2223,38268],{"alt":38269,"src":38270},"\"JavaScript Plugin - Sonar\"","https://media.synyx.de/uploads//2012/08/js_sonar_01.png",[2223,38272],{"alt":38269,"src":38273},"https://media.synyx.de/uploads//2012/08/js_sonar_02.png",[2207,38275,38277],{"id":38276},"problems","Problems",[18,38279,38280],{},"An annoying problem is running the tests with real browsers like Firefox and Chrome. The maven build automatically\nstarts the browser and also closes it after the tests are finished. But Firefox is not correctly closed by jstd somehow…\nso the next test run fails because Firefox opens a dialog which must be closed manually. The maven build is deadlocked\nand you have to abort and rerun it…",[18,38282,38283],{},"So maybe a running Firefox process would be a workaround, I thought. Well, it kinda worked but the opened tab was not\nclosed anymore (tested on Linux and Windows). Each new test run opened a new tab and after a handful testruns the tests\nfailed because of some strange error. Closing tabs manually solved this, however. Same problem occurred with Chrome (\nVersion 22.0.1201.0 dev).",[18,38285,38286,38287,38291],{},"On one hand it is really nice to run the tests in all desired browsers, on the other hand closing tabs/browsers manually\nmakes it impossible to automate this process. So I’m really looking forward to Jasmine support of the sonar JavaScript\nPlugin to run headless tests as a maven build, just like jasmine-maven-plugin. A quick google search links to this\nproject: ",[585,38288,38289],{"href":38289,"rel":38290},"https://github.com/jwark/jstd-standalone-headless",[589]," Maybe this could be a solution… Any information is welcome,\nso if you have a working setup, please let me know.",[18,38293,38294,38295,38298,38299,38301],{},"Another problem surely is the mandatory specification of the ",[573,38296,38297],{},"sourceDirectory"," to be able to see the metrics in Sonar.\nUsually you will have a Java project with some JavaScript code. Therefore you certainly can’t pinpoint to ",[573,38300,38201],{},"\nas source directory of the project, for example. 
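The Todo list below already hints at one possible way around the sourceDirectory problem: keep the directory behind a property and only switch it in a dedicated profile for the JavaScript analysis. This is an untested sketch, and the property and profile names are invented:

```xml
<properties>
    <!-- default: the normal Java sources -->
    <source.dir>src/main/java</source.dir>
</properties>

<build>
    <sourceDirectory>${source.dir}</sourceDirectory>
</build>

<profiles>
    <profile>
        <!-- activate with: mvn sonar:sonar -Dsonar.language=js -Dsonar.branch=js -Pjs-sonar -->
        <id>js-sonar</id>
        <properties>
            <source.dir>src/main/js</source.dir>
        </properties>
    </profile>
</profiles>
```

A property is used instead of overriding sourceDirectory inside the profile directly, because a profile's build section does not accept a sourceDirectory element.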
Further information is appreciated, again 🙂",[2207,38303,38305],{"id":38304},"todo","Todo",[18,38307,38308],{},[27,38309,38310],{},"automate the analysis within a Jenkins build process",[18,38312,38313,38314],{},"— ",[573,38315,38316],{},"maybe jstd tests can be run headless?",[18,38318,38313,38319],{},[573,38320,38321],{},"maybe maven profiles could be used to prevent the sourceDirectory declaration?",[18,38323,38324],{},[27,38325,38326],{},"require.js integration in jstd-unit-tests",[18,38328,38313,38329],{},[573,38330,38331,38336,38337,38341],{},[585,38332,38335],{"href":24506,"rel":38333,"title":38334},[589],"require.js","RequireJS"," is a JavaScript file and module loader\nand ",[585,38338,7366],{"href":38339,"rel":38340},"https://github.com/podefr/jasmine-reqjs-jstd/wiki/how-to-setup-requirejs---jasmine---jsTestDriver",[589]," should be\na good starting point.",[607,38343,989],{},{"title":48,"searchDepth":86,"depth":86,"links":38345},[38346,38347,38348,38349,38350,38351,38352],{"id":37772,"depth":86,"text":37773},{"id":37824,"depth":86,"text":37803},{"id":37810,"depth":86,"text":37810},{"id":38194,"depth":86,"text":38195},{"id":38228,"depth":86,"text":38229},{"id":38276,"depth":86,"text":38277},{"id":38304,"depth":86,"text":38305},[613,996,997],"2012-08-08T13:43:42","It is hard to imagine a web project without JavaScript code today. JavaScript is an easy to learn and very performant\\nscript language. In the past we have used JavaScript mostly for eye-candy and form validation. Recently we have been\\nasked more often to implement complex user interfaces with trees, sortable tables and things like that. So we decided to\\nrely more on JavaScript to improve the feedback of the website to user actions.","https://synyx.de/blog/visualize-javascript-code-quality-and-code-coverage-with-sonar/",{},"/blog/visualize-javascript-code-quality-and-code-coverage-with-sonar",{"title":35741,"description":37738},"blog/visualize-javascript-code-quality-and-code-coverage-with-sonar",[],"It is hard to imagine a web project without JavaScript code today. JavaScript is an easy to learn and very performant script language. In the past we have used JavaScript…","mrqNn4n0IH3ZbUdfxIC3kySm1Vw9wOmXI1qcURunod0",{"id":38365,"title":38366,"author":38367,"body":38368,"category":38999,"date":39000,"description":39001,"extension":617,"link":39002,"meta":39003,"navigation":499,"path":39004,"seo":39005,"slug":38372,"stem":39006,"tags":39007,"teaser":39008,"__hash__":39009},"blog/blog/migrating-data-with-liquibase.md","Migrating data with Liquibase",[19626],{"type":11,"value":38369,"toc":38997},[38370,38373,38376,38379,38382,38385,38432,38435,38512,38515,38529,38532,38535,38595,38598,38636,38639,38685,38688,38691,38705,38708,38774,38777,38780,38838,38841,38844,38847,38924,38927,38930,38972,38975,38989,38992,38995],[14,38371,38366],{"id":38372},"migrating-data-with-liquibase",[18,38374,38375],{},"Recently, we started integrating Liquibase as a database schema migration tool into most of my team’s projects, for both\nnew from-scratch projects and already existing ones. 
Liquibase is great because it allows us to use an SCM tool like\nGit to manage different revisions of an applications database schema – or more specifically, the changes required to\nmigrate the database schema from one revision to another.",[18,38377,38378],{},"While migrating database schemas seems like a pretty straight-forward task at the beginning, things get more\ncomplicated as soon as you want to roll back schema changes without dropping your database (and then rebuilding it).\nLiquibase also supports migrating your data across schema changes, in both directions. But lets start with the basics.",[18,38380,38381],{},"For this example, I only used the Liquibase command line interface, along with the basic MySQL command line client. Of\ncourse, Liquibase also integrates nicely with Maven (as a Maven goal) or Spring (as a bean that executes during context\ninitialization).",[18,38383,38384],{},"I start with a very basic table called „Person“, consisting only of an ID (primary key) and a name:",[43,38386,38388],{"className":13786,"code":38387,"language":13788,"meta":48,"style":48},"\nmysql> describe Person;\n+-------+--------------+------+-----+---------+----------------+\n| Field | Type | Null | Key | Default | Extra |\n+-------+--------------+------+-----+---------+----------------+\n| id | bigint(20) | NO | PRI | NULL | auto_increment |\n| name | varchar(255) | NO | UNI | NULL | |\n+-------+--------------+------+-----+---------+----------------+\n2 rows in set (0.00 sec)\n\n",[50,38389,38390,38394,38399,38404,38409,38413,38418,38423,38427],{"__ignoreMap":48},[53,38391,38392],{"class":55,"line":56},[53,38393,500],{"emptyLinePlaceholder":499},[53,38395,38396],{"class":55,"line":86},[53,38397,38398],{},"mysql> describe Person;\n",[53,38400,38401],{"class":55,"line":126},[53,38402,38403],{},"+-------+--------------+------+-----+---------+----------------+\n",[53,38405,38406],{"class":55,"line":163},[53,38407,38408],{},"| Field | Type | Null | Key | Default | Extra |\n",[53,38410,38411],{"class":55,"line":186},[53,38412,38403],{},[53,38414,38415],{"class":55,"line":221},[53,38416,38417],{},"| id | bigint(20) | NO | PRI | NULL | auto_increment |\n",[53,38419,38420],{"class":55,"line":242},[53,38421,38422],{},"| name | varchar(255) | NO | UNI | NULL | |\n",[53,38424,38425],{"class":55,"line":273},[53,38426,38403],{},[53,38428,38429],{"class":55,"line":279},[53,38430,38431],{},"2 rows in set (0.00 sec)\n",[18,38433,38434],{},"Liquibase uses so-called changesets, which are XML-snippets used to describe DDL statements. They are organized in\nchange log files. 
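The Spring integration mentioned above is not shown in this post, which sticks to the command line client. A minimal sketch of such a bean, using the class and property names the Liquibase Spring integration is commonly wired with and a made-up change log file name, would be:

```xml
<!-- Sketch only; not part of the original example. -->
<bean id="liquibase" class="liquibase.integration.spring.SpringLiquibase">
    <property name="dataSource" ref="dataSource"/>
    <!-- the file name is hypothetical; point it at your master change log -->
    <property name="changeLog" value="classpath:db.changelog-master.xml"/>
</bean>
```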
The following change set is used to create a table (via the „createTable“-tag) and two columns (via\nthe „column“-tag)",[43,38436,38438],{"className":3792,"code":38437,"language":3794,"meta":48,"style":48},"\n\u003Cdatabasechangelog xmlns=\"http://www.liquibase.org/xml/ns/dbchangelog\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemalocation=\"http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-2.0.xsd\">\n \u003Cchangeset author=\"mueller@synyx.de\" id=\"1\" runonchange=\"true\">\n \u003Ccreatetable tablename=\"Person\">\n \u003Ccolumn autoincrement=\"true\" name=\"id\" type=\"BIGINT\">\n \u003Cconstraints nullable=\"false\" primarykey=\"true\">\n \u003C/constraints>\n \u003C/column>\n \u003Ccolumn name=\"name\" type=\"VARCHAR(255)\">\n \u003Cconstraints nullable=\"false\">\n \u003C/constraints>\n \u003C/column>\n \u003C/createtable>\n \u003C/changeset>\n\u003C/databasechangelog>\n\n",[50,38439,38440,38444,38449,38454,38459,38464,38469,38474,38479,38484,38489,38493,38497,38502,38507],{"__ignoreMap":48},[53,38441,38442],{"class":55,"line":56},[53,38443,500],{"emptyLinePlaceholder":499},[53,38445,38446],{"class":55,"line":86},[53,38447,38448],{},"\u003Cdatabasechangelog xmlns=\"http://www.liquibase.org/xml/ns/dbchangelog\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemalocation=\"http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-2.0.xsd\">\n",[53,38450,38451],{"class":55,"line":126},[53,38452,38453],{}," \u003Cchangeset author=\"mueller@synyx.de\" id=\"1\" runonchange=\"true\">\n",[53,38455,38456],{"class":55,"line":163},[53,38457,38458],{}," \u003Ccreatetable tablename=\"Person\">\n",[53,38460,38461],{"class":55,"line":186},[53,38462,38463],{}," \u003Ccolumn autoincrement=\"true\" name=\"id\" type=\"BIGINT\">\n",[53,38465,38466],{"class":55,"line":221},[53,38467,38468],{}," \u003Cconstraints nullable=\"false\" primarykey=\"true\">\n",[53,38470,38471],{"class":55,"line":242},[53,38472,38473],{}," \u003C/constraints>\n",[53,38475,38476],{"class":55,"line":273},[53,38477,38478],{}," \u003C/column>\n",[53,38480,38481],{"class":55,"line":279},[53,38482,38483],{}," \u003Ccolumn name=\"name\" type=\"VARCHAR(255)\">\n",[53,38485,38486],{"class":55,"line":496},[53,38487,38488],{}," \u003Cconstraints nullable=\"false\">\n",[53,38490,38491],{"class":55,"line":503},[53,38492,38473],{},[53,38494,38495],{"class":55,"line":509},[53,38496,38478],{},[53,38498,38499],{"class":55,"line":515},[53,38500,38501],{}," \u003C/createtable>\n",[53,38503,38504],{"class":55,"line":521},[53,38505,38506],{}," \u003C/changeset>\n",[53,38508,38509],{"class":55,"line":527},[53,38510,38511],{},"\u003C/databasechangelog>\n",[18,38513,38514],{},"When I run Liquibase via command line, it sets up the „Person“ table. 
The relevant command is „update“:

```
./liquibase --url=jdbc:mysql://localhost:3306/liquiblog --driver=com.mysql.jdbc.Driver --username=root --password="" --changeLogFile=db.changelog-0.1.0.xml update
```

Liquibase already knows how to roll back certain changesets, like the „createTable“ changeset above. If we call the command line client with „rollbackCount 1“ instead of „update“, it rolls back the last changeset it executed, and the „Person“ table is gone.

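The same operations are also available through Liquibase's Java API, which the Maven and Spring integrations build on. A rough sketch, assuming the Liquibase 2.x API and the connection settings used above; error handling and resource cleanup are omitted.

```java
import java.sql.Connection;
import java.sql.DriverManager;

import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.resource.FileSystemResourceAccessor;

public class RollbackLastChangeSet {

    public static void main(String[] args) throws Exception {
        Class.forName("com.mysql.jdbc.Driver"); // older MySQL drivers need explicit loading
        Connection connection = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/liquiblog", "root", "");

        Database database = DatabaseFactory.getInstance()
                .findCorrectDatabaseImplementation(new JdbcConnection(connection));

        Liquibase liquibase = new Liquibase("db.changelog-0.1.0.xml",
                new FileSystemResourceAccessor(), database);

        // liquibase.update(null) would be the counterpart of the "update" command;
        // the call below mirrors "rollbackCount 1" on the command line
        liquibase.rollback(1, null);
    }
}
```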
Other changesets cannot be rolled back automatically. Consider the following „insert“ changeset that inserts an entry into our „Person“ table:

```xml
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-2.0.xsd">
    <changeSet author="mueller@synyx.de" id="init-1">
        <insert tableName="Person">
            <column name="name" value="John Doe"/>
        </insert>
        <rollback>
            DELETE FROM Person WHERE name LIKE 'John Doe';
        </rollback>
    </changeSet>
</databaseChangeLog>
```

I manually added a „rollback“ tag containing an SQL statement that reverses the changeset. Note that the „rollback“ tag can contain either SQL statements as text or certain Liquibase refactoring tags. Since we now have two change log XML files, I created a „master“ file that imports the other files in the order in which they should be executed:

```xml
<?xml version="1.0" encoding="UTF-8"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog/1.9"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog/1.9
                            http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-1.9.xsd">
    <include file="db.changelog-0.1.0.xml"/>
    <include file="db.changelog-0.1.0.init.xml"/>
</databaseChangeLog>
```

If we run the „update“ command with the master change log file, it checks whether the first changeset was already executed (depending on whether you rolled it back or not) and then executes the second changeset that adds a „Person“ entry.
To make this work, Liquibase creates a helper table called „DATABASECHANGELOG“ containing the already-executed changesets along with a hash value (to make sure no one modifies changesets once they have been executed):

```
mysql> select id, md5sum, description from DATABASECHANGELOG;
+--------+------------------------------------+--------------+
| id     | md5sum                             | description  |
+--------+------------------------------------+--------------+
| 1      | 3:5a36f447e90b35c3802cb6fe16cb12a7 | Create Table |
| init-1 | 3:43c29e0011ebfcfd9cfbbb8450179a41 | Insert Row   |
+--------+------------------------------------+--------------+
2 rows in set (0.00 sec)
```

Now that we got the basics running, let's try something more challenging: an actual change to our schema that requires both schema and data migration.
Our „Person“ table currently has only a name column, and we decided that we want to split it up into a „firstname“ and a „lastname“ column.

Before beginning work, I have Liquibase „tag“ the database so that we can roll back to this tag later on:

```
./liquibase --url=jdbc:mysql://localhost:3306/liquiblog --driver=com.mysql.jdbc.Driver --username=root --password="" --changeLogFile=changelog-master.xml tag liquiblog_0_1_0
```

I created a new changeset that adds the two new columns:

```xml
<changeSet author="mueller@synyx.de" id="1" runOnChange="true">
    <addColumn tableName="Person">
        <column name="firstname" type="VARCHAR(255)">
            <constraints nullable="false"/>
        </column>
        <column name="lastname" type="VARCHAR(255)">
            <constraints nullable="false"/>
        </column>
    </addColumn>
</changeSet>
```

Once again, Liquibase knows how to roll back this changeset, so we can skip the rollback tag.

Now that the table has two additional columns, we must take care of migrating our existing data to the new schema before deleting the old, now obsolete „name“ column.
Since data manipulation is not supported out of the box by Liquibase, we have to use its „sql“ tag to include native SQL statements within a changeset.

```xml
<changeSet author="mueller@synyx.de" id="2">
    <sql>
        UPDATE Person SET firstname = SUBSTRING_INDEX(name, ' ', 1);
        UPDATE Person SET lastname = SUBSTRING_INDEX(name, ' ', -1);
    </sql>
    <rollback>
        UPDATE Person SET firstname = '';
        UPDATE Person SET lastname = '';
    </rollback>
</changeSet>
```

Note that the content of the „rollback“ tag is kind of redundant, but the tag itself is required because Liquibase prevents us from rolling back changesets that cannot be rolled back implicitly and have no explicit rollback tag.

Once again, after executing Liquibase with the „update“ option, the new changeset is run, and our newly created „firstname“ and „lastname“ columns now contain data.

Finally, I want to remove the old „name“ column.

```xml
<changeSet author="mueller@synyx.de" id="3" runOnChange="true">
    <dropColumn columnName="name" tableName="Person"/>
    <rollback>
        <addColumn tableName="Person">
            <column name="name" type="VARCHAR(255)">
                <constraints nullable="false"/>
            </column>
        </addColumn>
        <sql>
            UPDATE Person SET name = CONCAT(firstname, CONCAT(' ', lastname));
        </sql>
    </rollback>
</changeSet>
```
Again, the changeset itself is quite simple because Liquibase supports dropping columns, but the „rollback“ tag is more complicated: I first re-add the old „name“ column using the standard „addColumn“ tag, and then I use a custom SQL statement to set the column's value.

We end up with a new database schema, complete with data:

```
mysql> select * from Person;
+----+-----------+----------+
| id | firstname | lastname |
+----+-----------+----------+
|  1 | John      | Doe      |
+----+-----------+----------+
1 row in set (0.00 sec)
```

Because we created a tag earlier and included rollback instructions in all our changesets, we can always roll back these modifications without losing any data! By running...

```
./liquibase --url=jdbc:mysql://localhost:3306/liquiblog --driver=com.mysql.jdbc.Driver --username=root --password="" --changeLogFile=changelog-master.xml rollback liquiblog_0_1_0
```

...we get our original database back!

Of course, the example with splitting / concatenating strings is a little far-fetched, but the same principles can be applied to more sophisticated refactorings.
I came across the idea for this blog post when we had to split an existing domain class (mapped to a single table) into an abstract base class and two subclasses, preferably without losing data.

# Consolidating development environments – a Bash Magic tutorial

Developers have a tendency to work on more than a single project at once. Depending on those projects, there is a constant struggle to keep your programming environment in sync with what you are actually doing. For that big legacy product you are maintaining you might need an old Java 1.5 in a specific version – for that fancy new web app you might be using the newest Java.

Different people have different strategies on how to take care of that problem.
IDEs can be used to set software versions for specific projects, but using your build tool from the command line uses a completely different environment. Adapting your current command line environment is a little trickier and can be solved in various ways – with varying capabilities and problems.

This article is aimed at anyone wanting to learn a little more about Bash background magic – the resulting code will be workable but will have to be fleshed out by the user to his own needs.

So, what do we actually want our “system” to do:

- Control the build environment (Java, Maven, …)
- Special parameters for the build/execution environment (MAVEN_OPTS, …)
- Switching environments while working
- Switching environments based on project
- Different environments in different terminals
- More than one user should be able to reuse “environment profiles”

The first idea on how to switch an environment seems to be to create symlinks to the correct environment. That can be partly automated, but it isn't really pretty and the environment is set for all terminals.

The way I will present here isn't new; in fact [rvm](http://rvm.io) probably does something very similar for Ruby (and is a lot more fleshed out, most likely). But the technique is quite nice and makes switching environments quite unproblematic.

Having multiple users use the same kind of environment of course takes a little standardizing. The software for the build environment should be located at the same place, for example. Our DevOps use Puppet to automate that, but that kind of automation isn't really a requirement to use anything in this post.

Let's have a look at how the heart of the technique looks:

```bash
$ foo() { echo foo; }
$ PROMPT_COMMAND=foo
foo
$
```

Bash lets you run a command before each prompt. The command is able to influence the current environment – this is important, as you want to set the profile for the current shell and don't want to spawn subshells all the time.
It can also set how the next prompt will look, using the PS1 environment variable.

Let's take that idea, put it in a file and build a little bit of code around it to get a quite basic profile switcher (which we will call profilehandler, or ph for short).

```bash
profilehandler() {
    if [ x"$PH_PROFILE" = x"new" ]; then
        export JAVA_HOME=/opt/software/jdk/jdk1.7.0_04
    elif [ x"$PH_PROFILE" = x"old" ]; then
        export JAVA_HOME=/opt/software/jdk/jdk1.5.0_22
    fi
    if [ -n "$PH_PROFILE" ]; then
        export PATH="$JAVA_HOME/bin:$PATH"
    fi
    export PS1="${PH_PROFILE:+[$PH_PROFILE] }\$ "
}
PROMPT_COMMAND=profilehandler
```
```bash
$ source profilehandler.sh
$ PH_PROFILE=new
[new] $ echo $JAVA_HOME/bin   # -> 1.7
```

**Good:**

- Can set profile
- Shows profile in path

**Bad:**

- Ugly interface
- Leaks paths into PATH
- Profiles are specified directly in code

Let's refactor that a little and make it slightly easier and safer to use.

```bash
# set up where our software/profiles will reside
PH_HOME=$HOME/tmp/ph

profilehandler() {
    local profilepath
    # check if we have a valid profile
    if [ -f "$PH_HOME/profiles/$PH_PROFILE" ]; then
        profilepath="$PH_HOME/profiles/$PH_PROFILE"
    fi
    # clean up
    local cleanpath="$(echo "$PATH" | sed 's#\('$PH_HOME'[^:]*:\)\|\(::\)##g')"
    export PATH="$cleanpath"
    # a profile can be loaded, do it
    if [ -n "$profilepath" ]; then
        . "$profilepath"
        export PATH="${JAVA_HOME:+$JAVA_HOME/bin:}$PATH"
    fi
    # set prompt
    export PS1="${PH_PROFILE:+[$PH_PROFILE] }\$ "
}
```
Let the pain of that code dump subside a little bit, then proceed reading. We don't do much more than the previous snippet: we just load the profile from an external file (in $PH_HOME/profiles) and make sure that the PATH gets cleaned up. This method of clearing is quite fickle – you need to have all software in PH_HOME and certain edge cases do not get handled – but for our simple requirements, that's just fine.

Let's also create a nice UI so we can set our profile more elegantly.

```bash
# make a nice usable interface
ph() {
    case "$1" in
        set)
            if ! [ -f "$PH_HOME/profiles/$2" ]; then
                echo >&2 "Bad profile: $2"
                return
            fi
            export PH_PROFILE="$2"
            ;;
        enable)
            ph set "${2:-default}"
            export oldPS1="$PS1"
            export PROMPT_COMMAND=profilehandler
            ;;
        disable)
            unset PH_PROFILE
            unset PROMPT_COMMAND
            export PS1="$oldPS1"
            ;;
        *)
            echo "Usage: ph enable [profile]|disable|set profile"
    esac
}
```
We now have some sanity checking, a way to enable and disable our environment switcher, and some amount of cleanup code so we get our old prompt back.

**Good:**

- Can set profile
- Shows profile in path
- Nicer interface
- Profiles easily addable

**Bad:**

- Insufficient cleanup (PATH retains Java path on disable)
- Lacks features

But hey, that's only our second take. And there is much more room for improvement:

- Automatic profile switching on project directories (maybe implemented by a dotfile in the project directory – the profilehandler can search down the stack for that and set the profile accordingly)
- Local and global profile paths, so shared profiles can be distributed easily and users can override global settings
- Configuring how the prompt looks
- Showing the branch of your favorite SCM inside the prompt
- Allow profiles to do their own cleanup/initialization
- Support more than just Java
- List available profiles
- Much more exhaustive environment cleanup on disable

Having the capability to call a function before each prompt, in the scope of the current shell, is very handy. But that also means there is a limit on how much you can do. Adding features is a good thing, but you have to take care not to extend the running time to a user-noticeable amount. You should make sure to only call programs which are sure to return in a short amount of time. If you are working partly on a network-mounted disk, keep in mind that this could potentially block your shell for an extended amount of time. If you're using automount and something in your profilehandler (like looking up the current Git branch) looks at each directory up to the root, this can also hurt performance. Also, while extending the profilehandler, if you get a stuck shell which does not show a prompt anymore, keep an eye open for programs reading from stdin.

No matter which method you are using, it is probably a good idea to document your environment settings so everyone in the project has a similar setup. And automating that process also sounds like a very good idea.
So have fun playing around!

# Scheduling and asynchronous execution with Spring

You want to execute cron jobs or call your methods asynchronously?
Thanks to Spring’s annotation support for scheduling and asynchronous execution, you can achieve this in a few minutes.

## Some xml magic

First, define your task executor and scheduler. The following lines will create an instance of ThreadPoolTaskExecutor and an instance of ThreadPoolTaskScheduler with the given pool sizes. The task element annotation-driven allows you to use Spring’s annotations for scheduling and asynchronous execution within the beans defined in your application context.

```xml
<bean id="myClass" class="my.project.path.myClass" />
<task:annotation-driven executor="myExecutor" scheduler="myScheduler" />
<task:executor id="myExecutor" pool-size="5" />
<task:scheduler id="myScheduler" pool-size="10" />
```
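If you prefer Java configuration over XML, roughly the same setup can be expressed with annotations. This is only a sketch, assuming Spring 3.1+ with @EnableAsync and @EnableScheduling; the class name TaskConfig is made up, and depending on the Spring version you may additionally need a SchedulingConfigurer (and a getAsyncUncaughtExceptionHandler() override) to bind the pools explicitly.

```java
import java.util.concurrent.Executor;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.annotation.AsyncConfigurer;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;

// Hypothetical Java-config counterpart to the <task:annotation-driven> XML above.
@Configuration
@EnableAsync
@EnableScheduling
public class TaskConfig implements AsyncConfigurer {

    // executor used for @Async methods (mirrors pool-size="5" from the XML)
    @Override
    public Executor getAsyncExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(5);
        executor.initialize(); // not container-managed here, so initialize manually
        return executor;
    }

    // scheduler intended for @Scheduled methods (mirrors pool-size="10" from the XML);
    // whether it is picked up automatically depends on the Spring version
    @Bean
    public TaskScheduler taskScheduler() {
        ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
        scheduler.setPoolSize(10);
        return scheduler;
    }
}
```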
## The @Scheduled annotation

With the @Scheduled annotation you can execute your method as a cron job. Using this annotation requires that the method to be scheduled is of type void and does not expect any arguments. The following examples show you how to use the @Scheduled annotation.

If you want periodic scheduling you can use the property fixedRate. In this example the method would be executed every 42 seconds.

```java
@Scheduled(fixedRate = 42000)
public void execute() {
    // do something
}
```

If you prefer cron expressions you can use them as well. The following example is analogous to the one above, only using a cron expression. The annotated method would be executed every 7 seconds, starting anew at each full minute.

```java
@Scheduled(cron = "*/7 * * * * *")
public void execute() {
    // do something
}
```

Without question you have far more possibilities with cron expressions than with periodic scheduling. In this example your method would be executed every weekday (Monday to Friday) at 9.45 am.

```java
@Scheduled(cron = "0 45 9 * * MON-FRI")
public void execute() {
    // do something
}
```

A pretty cool feature is that you can even use placeholders for your cron expression, which are resolved against the configured property placeholder.

```java
@Scheduled(cron = "${myclass.cron.execute.sth}")
public void execute() {
    // do something
}
```

Define where the properties are loaded from within your application context:

```xml
<context:property-placeholder location="classpath:application.properties"/>
```

Then the properties are loaded from the file which contains your cron expressions, like this:

```properties
myclass.cron.execute.sth=0 45 9 * * MON-FRI
```

## The @Async annotation

The @Async annotation allows you to invoke your method asynchronously. The execution of the method will occur in a task that has been submitted to the TaskExecutor defined in your application context. Contrary to methods annotated with the @Scheduled annotation, the methods you annotate with @Async may have a return type other than void and can expect arguments.

This is a simple example of an @Async annotated method without a return value.

```java
@Async
void execute(String string) {
    // do something asynchronously
}
```

As mentioned above, your @Async annotated method may have a return value. However, this return value must be of type Future.
This means that the caller gets a Future back immediately, other tasks can be performed first, and get() is called on that Future only when the result is needed.

```java
@Async
Future<String> execute(String string) {
    // do something asynchronously, then wrap the result in a Future
    return new AsyncResult<String>("result of " + string);
}
```
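To make the Future part concrete, here is a small, self-contained sketch of what the calling side could look like. The class names AsyncService and AsyncCallerDemo are made up for illustration; only @Async, AsyncResult, @Service and @Autowired are Spring API.

```java
import java.util.concurrent.Future;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.AsyncResult;
import org.springframework.stereotype.Service;

// Hypothetical bean containing the @Async method from above.
@Service
public class AsyncService {

    @Async
    public Future<String> execute(String string) {
        String result = "processed " + string;    // stands in for the real, long-running work
        return new AsyncResult<String>(result);   // an already-completed Future wrapping the value
    }
}

// Hypothetical caller: execute() returns immediately, other work can happen,
// and get() only blocks when the result is actually needed.
@Service
class AsyncCallerDemo {

    private final AsyncService asyncService;

    @Autowired
    AsyncCallerDemo(AsyncService asyncService) {
        this.asyncService = asyncService;
    }

    public String callAndWait() throws Exception {
        Future<String> future = asyncService.execute("some input");
        // ... do other things while the task runs on the configured executor ...
        return future.get(); // blocks until the asynchronous result is available
    }
}
```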
## Further information

Have a look at the [Spring Framework Reference Documentation](http://static.springsource.org/spring/docs/3.1.x/spring-framework-reference/htmlsingle/spring-framework-reference.html#scheduling).

# Performance tuning Maven-OpenCms builds using PostgreSQL

Having a newly installed Ubuntu 12.04 on my machine, I noticed that building the OpenCms project I am currently working on is a very time consuming process:

```
oli@rikit:~/develop/projects/foo$ time mvn clean install > mvn.log 2>&1
real    12m34.748s
user    2m10.132s
sys     0m6.836s
```

Uh, more than 12 minutes… definitely too much; even for an OpenCms project.

During the Maven build's install phase a lot of database write operations are performed: after an OpenCms module has been built (it's a multi-module project, so there is more than one OpenCms module), each OpenCms module gets deleted, re-imported and finally published into the OpenCms VFS. For more details on how we do build automation for OpenCms projects with Maven, see [this earlier blog post](http://blog.synyx.de/2011/04/maven-and-opencms/).

I searched the web and quickly found the reason for this performance issue: on my machine I have an ext4 file system; the PostgreSQL server version installed is 9.1. And that's where PostgreSQL's *Write-Ahead Log* (WAL) configuration settings become interesting. In short, the PostgreSQL server uses synchronous commits by default, which means that PostgreSQL waits for ext4 to confirm that page images have been written to the permanent WAL storage on disk.

One solution is to set the configuration parameter **fsync** to the value **off** (file postgresql.conf). This prevents PostgreSQL from performing any attempt to synchronize database write operations by never invoking the operating system's fsync() system call. This introduces the risk of *data corruption* in the event of a power failure or system crash.

Another option is switching to asynchronous transaction commits, which means that the PostgreSQL server no longer waits for confirmation that the transaction's WAL records have been written to disk. Instead, it continues just after the transaction commit is considered *logically* completed.
This can be achieved by setting the configuration parameter **synchronous_commit** to the value **off** (file postgresql.conf). This comes at the risk of *data loss* (but not *data corruption*, as with **fsync=off**).

After setting **synchronous_commit=off** the build process is much faster:

```
oli@rikit:~/develop/projects/foo$ time mvn clean install > mvn.log 2>&1
real    1m47.170s
user    2m9.856s
sys     0m6.688s
```

Applying **fsync=off** even saves some more seconds:

```
oli@rikit:~/develop/projects/foo$ time mvn clean install > mvn.log 2>&1
real    1m42.451s
user    2m8.744s
sys     0m6.668s
```

For more details on PostgreSQL's WAL mechanism and configuration options have a look at the PostgreSQL documentation:

- [http://www.postgresql.org/docs/9.1/static/wal.html](http://www.postgresql.org/docs/9.1/static/wal.html)
- [http://www.postgresql.org/docs/9.1/static/runtime-config-wal.html](http://www.postgresql.org/docs/9.1/static/runtime-config-wal.html)

# How to monitor and manage your Java application with JMX
JMX",[30653],{"type":11,"value":40384,"toc":41024},[40385,40388,40391,40406,40481,40484,40488,40491,40581,40584,40591,40595,40609,40614,40619,40642,40645,40650,40655,40679,40682,40704,40709,40714,40738,40744,40783,40792,40796,40799,40804,40807,40813,40818,40821,40827,40830,40833,40865,40868,40918,40922,40925,40928,40988,40991,40995,41001,41008,41015,41022],[14,40386,40381],{"id":40387},"how-to-monitor-and-manage-your-java-application-with-jmx",[18,40389,40390],{},"JMX (Java Management Extensions) provides the infrastructure to support monitoring and management of your Java\napplications. Resources you manage with JMX are called Managed Beans (MBeans). I want to show you how to quickly\nregister your own Service as MBean using Spring and Source-Level Metadata (JDK 5.0+ annotations).",[18,40392,40393,40394,40397,40398,40401,40402,40405],{},"The following sample is built on a tool that allows to manage the staffs’ applications for vacation digitally instead of\nusing paper. If a staff member applies for leave, the application gets the status ",[573,40395,40396],{},"waiting",". Then an authorized person (\nthe boss) has to decide about this application. It may be set to ",[573,40399,40400],{},"allowed"," or to ",[573,40403,40404],{},"rejected",". It might be that you want\nto have an overview of the applications and their status and you may even want to remind the authorized persons via\nemail to review the pending applications. Even if the vacation management tool has a web-based frontend for doing the\nmost of the actions, I think it still makes a good example for describing how to use JMX in your Java application. The\nfollowing class is a skeleton of the class which shall be exposed to JMX as MBean.",[43,40407,40409],{"className":288,"code":40408,"language":290,"meta":48,"style":48},"public class JmxDemo {\n private long numberOfWaitingApplications;\n public long getNumberOfWaitingApplications() {\n return numberOfWaitingApplications;\n }\n public long countApplicationsInStatus(String status) {\n // do something and return number of applications with the given status\n }\n public List\u003CString> showWaitingApplications() {\n // do something and return a list of all waiting applications\n }\n public String remindBossAboutWaitingApplications() {\n // remind the boss via email to decide about the waiting applications\n }\n}\n",[50,40410,40411,40416,40421,40426,40431,40435,40440,40445,40449,40454,40459,40463,40468,40473,40477],{"__ignoreMap":48},[53,40412,40413],{"class":55,"line":56},[53,40414,40415],{},"public class JmxDemo {\n",[53,40417,40418],{"class":55,"line":86},[53,40419,40420],{}," private long numberOfWaitingApplications;\n",[53,40422,40423],{"class":55,"line":126},[53,40424,40425],{}," public long getNumberOfWaitingApplications() {\n",[53,40427,40428],{"class":55,"line":163},[53,40429,40430],{}," return numberOfWaitingApplications;\n",[53,40432,40433],{"class":55,"line":186},[53,40434,860],{},[53,40436,40437],{"class":55,"line":221},[53,40438,40439],{}," public long countApplicationsInStatus(String status) {\n",[53,40441,40442],{"class":55,"line":242},[53,40443,40444],{}," // do something and return number of applications with the given status\n",[53,40446,40447],{"class":55,"line":273},[53,40448,860],{},[53,40450,40451],{"class":55,"line":279},[53,40452,40453],{}," public List\u003CString> showWaitingApplications() {\n",[53,40455,40456],{"class":55,"line":496},[53,40457,40458],{}," // do something and return a list of all waiting 
applications\n",[53,40460,40461],{"class":55,"line":503},[53,40462,860],{},[53,40464,40465],{"class":55,"line":509},[53,40466,40467],{}," public String remindBossAboutWaitingApplications() {\n",[53,40469,40470],{"class":55,"line":515},[53,40471,40472],{}," // remind the boss via email to decide about the waiting applications\n",[53,40474,40475],{"class":55,"line":521},[53,40476,860],{},[53,40478,40479],{"class":55,"line":527},[53,40480,282],{},[18,40482,40483],{},"If you want to use this class as a MBean, a few steps are necessary.",[649,40485,40487],{"id":40486},"_1-not-yet-another-xml-file","1. Not yet another xml file…",[18,40489,40490],{},"It’s best you create an extra xml file (let’s call it jmxContext.xml) for JMX configuration and import it in your\napplicationContext.xml. In your jmxContext.xml you define your MBean and the MBeanExporter.",[43,40492,40494],{"className":3792,"code":40493,"language":3794,"meta":48,"style":48},"\n\u003Cbean id=\"jmxDemo\" class=\"org.synyx.urlaubsverwaltung.jmx.JmxDemo\">\n \u003C!-- maybe you need contructor-injection -->\n \u003C!-- \u003Cconstructor-arg ref=\"myService\" /> -->\n\u003C/bean>\n \u003C!-- you may just copy the following lines -->\n\u003Cbean id=\"exporter\" class=\"org.springframework.jmx.export.MBeanExporter\" lazy-init=\"false\">\n\u003Cproperty name=\"autodetect\" value=\"true\"/>\n\u003Cproperty name=\"namingStrategy\" ref=\"namingStrategy\"/>\n\u003Cproperty name=\"assembler\" ref=\"assembler\"/>\n\u003C/bean>\n\u003Cbean id=\"jmxAttributeSource\" class=\"org.springframework.jmx.export.annotation.AnnotationJmxAttributeSource\"/>\n\u003Cbean id=\"assembler\" class=\"org.springframework.jmx.export.assembler.MetadataMBeanInfoAssembler\">\n\u003Cproperty name=\"attributeSource\" ref=\"jmxAttributeSource\"/>\n\u003C/bean>\n\u003Cbean id=\"namingStrategy\" class=\"org.springframework.jmx.export.naming.MetadataNamingStrategy\">\n\u003Cproperty name=\"attributeSource\" ref=\"jmxAttributeSource\"/>\n\u003C/bean>\n",[50,40495,40496,40500,40505,40510,40515,40520,40525,40530,40535,40540,40545,40549,40554,40559,40564,40568,40573,40577],{"__ignoreMap":48},[53,40497,40498],{"class":55,"line":56},[53,40499,500],{"emptyLinePlaceholder":499},[53,40501,40502],{"class":55,"line":86},[53,40503,40504],{},"\u003Cbean id=\"jmxDemo\" class=\"org.synyx.urlaubsverwaltung.jmx.JmxDemo\">\n",[53,40506,40507],{"class":55,"line":126},[53,40508,40509],{}," \u003C!-- maybe you need contructor-injection -->\n",[53,40511,40512],{"class":55,"line":163},[53,40513,40514],{}," \u003C!-- \u003Cconstructor-arg ref=\"myService\" /> -->\n",[53,40516,40517],{"class":55,"line":186},[53,40518,40519],{},"\u003C/bean>\n",[53,40521,40522],{"class":55,"line":221},[53,40523,40524],{}," \u003C!-- you may just copy the following lines -->\n",[53,40526,40527],{"class":55,"line":242},[53,40528,40529],{},"\u003Cbean id=\"exporter\" class=\"org.springframework.jmx.export.MBeanExporter\" lazy-init=\"false\">\n",[53,40531,40532],{"class":55,"line":273},[53,40533,40534],{},"\u003Cproperty name=\"autodetect\" value=\"true\"/>\n",[53,40536,40537],{"class":55,"line":279},[53,40538,40539],{},"\u003Cproperty name=\"namingStrategy\" ref=\"namingStrategy\"/>\n",[53,40541,40542],{"class":55,"line":496},[53,40543,40544],{},"\u003Cproperty name=\"assembler\" ref=\"assembler\"/>\n",[53,40546,40547],{"class":55,"line":503},[53,40548,40519],{},[53,40550,40551],{"class":55,"line":509},[53,40552,40553],{},"\u003Cbean id=\"jmxAttributeSource\" 
class=\"org.springframework.jmx.export.annotation.AnnotationJmxAttributeSource\"/>\n",[53,40555,40556],{"class":55,"line":515},[53,40557,40558],{},"\u003Cbean id=\"assembler\" class=\"org.springframework.jmx.export.assembler.MetadataMBeanInfoAssembler\">\n",[53,40560,40561],{"class":55,"line":521},[53,40562,40563],{},"\u003Cproperty name=\"attributeSource\" ref=\"jmxAttributeSource\"/>\n",[53,40565,40566],{"class":55,"line":527},[53,40567,40519],{},[53,40569,40570],{"class":55,"line":533},[53,40571,40572],{},"\u003Cbean id=\"namingStrategy\" class=\"org.springframework.jmx.export.naming.MetadataNamingStrategy\">\n",[53,40574,40575],{"class":55,"line":539},[53,40576,40563],{},[53,40578,40579],{"class":55,"line":545},[53,40580,40519],{},[18,40582,40583],{},"If your application is running inside a container such as Tomcat, you even don’t have to configure the MBeanServer\nbecause the container has its own one.",[18,40585,40586,40587,40590],{},"Setting MBeanExporter’s property ",[573,40588,40589],{},"autodetect"," to true, means that the MBeanExporter will register all the Beans within\nyour application’s context that are annotated in the way described in the next section as MBeans.",[649,40592,40594],{"id":40593},"_2-lets-transform-your-spring-bean-to-a-managed-bean","2. Let’s transform your Spring Bean to a Managed Bean!",[18,40596,40597,40598,40601,40602,40605,40606,986],{},"Spring uses information provided by annotations to generate MBeans. The attributes of the annotations are speaking for\nthemselves so further description isn’t necessary. To mark a Bean for export, it has to be annotated with\n",[573,40599,40600],{},"@ManagedResource",", Attributes are annotated with ",[573,40603,40604],{},"@ManagedAttribute"," and Methods with ",[573,40607,40608],{},"@ManagedOperation",[18,40610,40611],{},[27,40612,40613],{},"2.1 Bean",[18,40615,40616,40617,986],{},"Mark your Bean with ",[573,40618,40600],{},[43,40620,40622],{"className":288,"code":40621,"language":290,"meta":48,"style":48},"@ManagedResource(objectName = \"mbeans:name=myJmxDemoBean\", description = \"My managed Bean.\")\npublic class JmxDemo {\n // lot of stuff\n}\n",[50,40623,40624,40629,40633,40638],{"__ignoreMap":48},[53,40625,40626],{"class":55,"line":56},[53,40627,40628],{},"@ManagedResource(objectName = \"mbeans:name=myJmxDemoBean\", description = \"My managed Bean.\")\n",[53,40630,40631],{"class":55,"line":86},[53,40632,40415],{},[53,40634,40635],{"class":55,"line":126},[53,40636,40637],{}," // lot of stuff\n",[53,40639,40640],{"class":55,"line":163},[53,40641,282],{},[18,40643,40644],{},"Make sure that your MBean doesn’t contain ‘MBean’ in its name since it would be treated as a StandardMBean causing your\nannotations not to work.",[18,40646,40647],{},[27,40648,40649],{},"2.2 Attributes",[18,40651,40652,40653,986],{},"Annotate the Getter and Setter with ",[573,40654,40604],{},[43,40656,40658],{"className":288,"code":40657,"language":290,"meta":48,"style":48},"@ManagedAttribute(description = \"Get the number of all waiting applications\" )\npublic long getNumberOfWaitingApplications() {\n return numberOfWaitingApplications;\n}\n",[50,40659,40660,40665,40670,40675],{"__ignoreMap":48},[53,40661,40662],{"class":55,"line":56},[53,40663,40664],{},"@ManagedAttribute(description = \"Get the number of all waiting applications\" )\n",[53,40666,40667],{"class":55,"line":86},[53,40668,40669],{},"public long getNumberOfWaitingApplications() {\n",[53,40671,40672],{"class":55,"line":126},[53,40673,40674],{}," return 
**2.2 Attributes**

Annotate the getter and setter with *@ManagedAttribute*:

```java
@ManagedAttribute(description = "Get the number of all waiting applications")
public long getNumberOfWaitingApplications() {
    return numberOfWaitingApplications;
}
```

Exposed attributes may be:

- Basic types
- Primitives and their wrappers
- String
- BigDecimal
- BigInteger
- Arrays and collections of basic types

**2.3 Methods**

Annotate each method you wish to expose with *@ManagedOperation*:

```java
@ManagedOperation(description = "Shows a list of all waiting applications with some information.")
public List<String> showWaitingApplications() {
    // do something and return a list of all waiting applications
}
```

If your methods have parameters, you can describe them further with *@ManagedOperationParameters*:

```java
@ManagedOperation(description = "Get the number of all applications that have the given status.")
@ManagedOperationParameters({
    @ManagedOperationParameter(name = "status", description = "The status may be waiting, allowed, rejected or cancelled.")
})
public long countApplicationsInStatus(String state) {
    // do something and return the number of applications with the given status
}
```

Make sure to annotate your getters/setters with *@ManagedAttribute* and not with *@ManagedOperation*. Otherwise your methods won't work.
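Before wiring up a management console, it can be reassuring to check programmatically that the exporter really registered the bean. A minimal sketch (the object name must match the one from *@ManagedResource*; call it after the Spring context, including jmxContext.xml, has been started, e.g. from an integration test):

```java
import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public final class MBeanRegistrationCheck {

    private MBeanRegistrationCheck() {
    }

    /**
     * Returns true once the exporter has registered the demo MBean. In a plain JVM
     * (and usually inside Tomcat) Spring exports to the platform MBeanServer.
     */
    public static boolean isDemoBeanRegistered() throws Exception {
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        return server.isRegistered(new ObjectName("mbeans:name=myJmxDemoBean"));
    }
}
```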
JMinix)",[18,40800,40801],{},[27,40802,40803],{},"3.1 JConsole",[18,40805,40806],{},"JConsole is part of Oracle’s JDK, so you can just start it by executing the JConsole command in your JDK’s\nbinary-folder. You can connect to local or to remote Java Virtual Machines. If you are running your application on the\nsame host as JConsole it should show up at the ‘Local Process’ section.",[18,40808,40809],{},[2223,40810],{"alt":40811,"src":40812},"\"jconsole\"","https://media.synyx.de/uploads//2012/04/jconsole.png",[18,40814,40815],{},[27,40816,40817],{},"3.2 JMinix",[18,40819,40820],{},"If you want to have a JMX entry point in your web application instead of using JConsole, JMinix might be the right\nchoice for you.",[18,40822,40823],{},[2223,40824],{"alt":40825,"src":40826},"\"JMinix\"","https://media.synyx.de/uploads//2012/04/jminix.png",[18,40828,40829],{},"You can include it easily in your Maven based web application:",[18,40831,40832],{},"Add JMinix as dependency in your pom.xml",[43,40834,40836],{"className":3792,"code":40835,"language":3794,"meta":48,"style":48},"\n\u003Cdependency>\n \u003CgroupId>org.jminix\u003C/groupId>\n \u003CartifactId>jminix\u003C/artifactId>\n \u003Cversion>1.0.0\u003C/version>\n\u003C/dependency>\n",[50,40837,40838,40842,40846,40851,40856,40861],{"__ignoreMap":48},[53,40839,40840],{"class":55,"line":56},[53,40841,500],{"emptyLinePlaceholder":499},[53,40843,40844],{"class":55,"line":86},[53,40845,36877],{},[53,40847,40848],{"class":55,"line":126},[53,40849,40850],{}," \u003CgroupId>org.jminix\u003C/groupId>\n",[53,40852,40853],{"class":55,"line":163},[53,40854,40855],{}," \u003CartifactId>jminix\u003C/artifactId>\n",[53,40857,40858],{"class":55,"line":186},[53,40859,40860],{}," \u003Cversion>1.0.0\u003C/version>\n",[53,40862,40863],{"class":55,"line":221},[53,40864,36897],{},[18,40866,40867],{},"JMinix uses a simple HttpServlet that you have to register and map to an url-pattern in your web.xml",[43,40869,40871],{"className":3792,"code":40870,"language":3794,"meta":48,"style":48},"\u003C!-- JMX -->\n\u003Cservlet>\n \u003Cservlet-name>JmxMiniConsoleServlet\u003C/servlet-name>\n \u003Cservlet-class>org.jminix.console.servlet.MiniConsoleServlet\u003C/servlet-class>\n\u003C/servlet>\n\u003Cservlet-mapping>\n\u003Cservlet-name>JmxMiniConsoleServlet\u003C/servlet-name>\n\u003Curl-pattern>/jmx/*\u003C/url-pattern>\n\u003C/servlet-mapping>\n",[50,40872,40873,40878,40883,40888,40893,40898,40903,40908,40913],{"__ignoreMap":48},[53,40874,40875],{"class":55,"line":56},[53,40876,40877],{},"\u003C!-- JMX -->\n",[53,40879,40880],{"class":55,"line":86},[53,40881,40882],{},"\u003Cservlet>\n",[53,40884,40885],{"class":55,"line":126},[53,40886,40887],{}," \u003Cservlet-name>JmxMiniConsoleServlet\u003C/servlet-name>\n",[53,40889,40890],{"class":55,"line":163},[53,40891,40892],{}," \u003Cservlet-class>org.jminix.console.servlet.MiniConsoleServlet\u003C/servlet-class>\n",[53,40894,40895],{"class":55,"line":186},[53,40896,40897],{},"\u003C/servlet>\n",[53,40899,40900],{"class":55,"line":221},[53,40901,40902],{},"\u003Cservlet-mapping>\n",[53,40904,40905],{"class":55,"line":242},[53,40906,40907],{},"\u003Cservlet-name>JmxMiniConsoleServlet\u003C/servlet-name>\n",[53,40909,40910],{"class":55,"line":273},[53,40911,40912],{},"\u003Curl-pattern>/jmx/*\u003C/url-pattern>\n",[53,40914,40915],{"class":55,"line":279},[53,40916,40917],{},"\u003C/servlet-mapping>\n",[649,40919,40921],{"id":40920},"_4-notifications","4. 
Notifications",[18,40923,40924],{},"Notifications (javax.management.Notification) can be broadcast from your component to notify about something interesting\nhappening. This is only a simple example of using Notifications.",[18,40926,40927],{},"Example: You want to be notified if a user logs in.",[43,40929,40931],{"className":288,"code":40930,"language":290,"meta":48,"style":48},"@ManagedResource(objectName = \"mbeans:name=myJmxDemoBean\", description = \"Manage some 'Urlaubsverwaltung' problems.\")\npublic class JmxDemoReady implements NotificationPublisherAware {\n // lot of stuff\n private NotificationPublisher notificationPublisher;\n public void notifyAboutLogin(String msg) {\n notificationPublisher.sendNotification(new Notification(\"Login Action\", this, 0, msg));\n }\n @Override\n public void setNotificationPublisher(NotificationPublisher notificationPublisher) {\n this.notificationPublisher = notificationPublisher;\n }\n}\n",[50,40932,40933,40938,40943,40947,40952,40957,40962,40966,40970,40975,40980,40984],{"__ignoreMap":48},[53,40934,40935],{"class":55,"line":56},[53,40936,40937],{},"@ManagedResource(objectName = \"mbeans:name=myJmxDemoBean\", description = \"Manage some 'Urlaubsverwaltung' problems.\")\n",[53,40939,40940],{"class":55,"line":86},[53,40941,40942],{},"public class JmxDemoReady implements NotificationPublisherAware {\n",[53,40944,40945],{"class":55,"line":126},[53,40946,40637],{},[53,40948,40949],{"class":55,"line":163},[53,40950,40951],{}," private NotificationPublisher notificationPublisher;\n",[53,40953,40954],{"class":55,"line":186},[53,40955,40956],{}," public void notifyAboutLogin(String msg) {\n",[53,40958,40959],{"class":55,"line":221},[53,40960,40961],{}," notificationPublisher.sendNotification(new Notification(\"Login Action\", this, 0, msg));\n",[53,40963,40964],{"class":55,"line":242},[53,40965,860],{},[53,40967,40968],{"class":55,"line":273},[53,40969,9049],{},[53,40971,40972],{"class":55,"line":279},[53,40973,40974],{}," public void setNotificationPublisher(NotificationPublisher notificationPublisher) {\n",[53,40976,40977],{"class":55,"line":496},[53,40978,40979],{}," this.notificationPublisher = notificationPublisher;\n",[53,40981,40982],{"class":55,"line":503},[53,40983,860],{},[53,40985,40986],{"class":55,"line":509},[53,40987,282],{},[18,40989,40990],{},"With the NotificationPublisher you are able to create Notifications in a very simple way. At the right place in your\ncode, you inject your JmxDemo Bean and call the method notifyAboutLogin() when a user logs in. JConsole now displays a\nthird menu item called ‘Notifications’, besides ‘Attributes’ and ‘Operations’. If you click on ‘Subscribe’, you get a\nNotification every time a user logs in your web application.",[649,40992,40994],{"id":40993},"_5-further-information","5. 
Further information:",[18,40996,40997],{},[585,40998,40172],{"href":40999,"rel":41000},"http://static.springsource.org/spring/docs/3.1.x/spring-framework-reference/html/jmx.html",[589],[18,41002,41003],{},[585,41004,41007],{"href":41005,"rel":41006},"http://docs.oracle.com/javase/1.5.0/docs/guide/management/jconsole.html",[589],"About JConsole",[18,41009,41010],{},[585,41011,41014],{"href":41012,"rel":41013},"http://code.google.com/p/jminix/",[589],"About JMinix",[18,41016,41017],{},[585,41018,41021],{"href":41019,"rel":41020},"http://blog.synyx.de/2011/11/elektronische-urlaubsverwaltung-made-by-youngsters",[589],"About the vacation management web application",[607,41023,989],{},{"title":48,"searchDepth":86,"depth":86,"links":41025},[41026,41027,41028,41029,41030],{"id":40486,"depth":126,"text":40487},{"id":40593,"depth":126,"text":40594},{"id":40794,"depth":126,"text":40795},{"id":40920,"depth":126,"text":40921},{"id":40993,"depth":126,"text":40994},[32772,613],"2012-05-07T17:56:12","JMX (Java Management Extensions) provides the infrastructure to support monitoring and management of your Java\\napplications. Resources you manage with JMX are called Managed Beans (MBeans). I want to show you how to quickly\\nregister your own Service as MBean using Spring and Source-Level Metadata (JDK 5.0+ annotations).","https://synyx.de/blog/how-to-monitor-and-manage-your-java-application-with-jmx/",{},"/blog/how-to-monitor-and-manage-your-java-application-with-jmx",{"title":40381,"description":40390},"blog/how-to-monitor-and-manage-your-java-application-with-jmx",[41040,41041,41042,41043,41044,41045,9210,1010],"annotation","jconsole","jminix","jmx","mbeans","metadata","JMX (Java Management Extensions) provides the infrastructure to support monitoring and management of your Java applications. Resources you manage with JMX are called Managed Beans (MBeans). I want to show…","Lf-pqL8fxMUtFEesPQZ2fYbpNoUAqH8DOQaGCkCDsBA",{"id":41049,"title":41050,"author":41051,"body":41053,"category":41201,"date":41202,"description":41203,"extension":617,"link":41204,"meta":41205,"navigation":499,"path":41206,"seo":41207,"slug":41057,"stem":41208,"tags":41209,"teaser":41210,"__hash__":41211},"blog/blog/works-on-my-machine-developing-and-testing-continuous-delivery-with-vagrant.md","'Works on my machine!' – Developing and Testing Continuous Delivery with Vagrant",[41052],"buch",{"type":11,"value":41054,"toc":41199},[41055,41059,41062,41071,41080,41083,41110,41113,41125,41128,41190,41193,41196],[14,41056,41058],{"id":41057},"works-on-my-machine-developing-and-testing-continuous-delivery-with-vagrant","\"Works on my machine!\" – Developing and Testing Continuous Delivery with Vagrant",[18,41060,41061],{},"I still hear it often in teams, even in agile ones where unit tests, integration tests and continuous integration are\nintegrated in daily work. One team member says it’s working and another with slightly different local environment says\nit’s not. Even more serious: local environments often don’t reflect the deployment environments, for example because the\nfavorite Linux distribution of the developer is not the required server distribution or it just has slightly different\nconfiguration and software versions.",[18,41063,41064,41065,41070],{},"To solve these issues and for testing continuous deployments including provisioning I had an eye\non ",[585,41066,41069],{"href":41067,"rel":41068},"http://vagrantup.com/",[589],"Vagrant"," for a while. 
But with limited time I had only tried small test installations until recently.

Our marketing department is quite technical compared to other companies, yet still uses a non-Linux OS for various reasons. To edit and test new content for our [homepage](http://blog.synyx.de/2012/03/new-homepage-with-nanoc-twitter-bootstrap-less-and-git/), the local compilation didn't always work out as expected (tested and deployed on Linux by developers). A perfect testbed for real-world usage of Vagrant.

The goal: editors just install Vagrant, update their repository and start via vagrant:

```bash
git clone git://some.repository/path.git
vagrant up
```

And everything is ready for use in a started VM; the first website build is already compiled and can be viewed by typing the VM's IP into the local browser.

One way might be to prepare a VirtualBox image with everything preinstalled, but the disadvantage is that I would need to maintain the image with software updates, changes etc. manually. With Vagrant I tell it to use a basic small Debian or Ubuntu box, which is downloaded from a given URL, and everything is provisioned via [Puppet](https://github.com/puppetlabs/puppet) or [Chef](https://github.com/opscode/chef).

Preparation was a combination of a small Vagrantfile and provisioning via Puppet.
My Vagrantfile:",[43,41129,41131],{"className":24337,"code":41130,"language":24339,"meta":48,"style":48},"\nVagrant::Config.run do |config|.\n config.vm.box = \"lucid32\"\n config.vm.box_url = \"http://files.vagrantup.com/lucid32.box\"\n config.vm.boot_mode = :gui\n config.vm.network :hostonly, \"192.168.33.10\"\n config.vm.provision :puppet do |puppet|\n puppet.manifests_path = \"puppet/manifests\"\n puppet.module_path = \"puppet/modules\"\n puppet.manifest_file = \"init.pp\"\n end\nend\n\n",[50,41132,41133,41137,41142,41147,41152,41157,41162,41167,41172,41177,41182,41186],{"__ignoreMap":48},[53,41134,41135],{"class":55,"line":56},[53,41136,500],{"emptyLinePlaceholder":499},[53,41138,41139],{"class":55,"line":86},[53,41140,41141],{},"Vagrant::Config.run do |config|.\n",[53,41143,41144],{"class":55,"line":126},[53,41145,41146],{}," config.vm.box = \"lucid32\"\n",[53,41148,41149],{"class":55,"line":163},[53,41150,41151],{}," config.vm.box_url = \"http://files.vagrantup.com/lucid32.box\"\n",[53,41153,41154],{"class":55,"line":186},[53,41155,41156],{}," config.vm.boot_mode = :gui\n",[53,41158,41159],{"class":55,"line":221},[53,41160,41161],{}," config.vm.network :hostonly, \"192.168.33.10\"\n",[53,41163,41164],{"class":55,"line":242},[53,41165,41166],{}," config.vm.provision :puppet do |puppet|\n",[53,41168,41169],{"class":55,"line":273},[53,41170,41171],{}," puppet.manifests_path = \"puppet/manifests\"\n",[53,41173,41174],{"class":55,"line":279},[53,41175,41176],{}," puppet.module_path = \"puppet/modules\"\n",[53,41178,41179],{"class":55,"line":496},[53,41180,41181],{}," puppet.manifest_file = \"init.pp\"\n",[53,41183,41184],{"class":55,"line":503},[53,41185,24365],{},[53,41187,41188],{"class":55,"line":509},[53,41189,24444],{},[18,41191,41192],{},"The rest is installed and prepared via puppet modules and manifests on first “vagrant up” which I provide in the\nprojects repository in a puppet/ directory.",[18,41194,41195],{},"With the release of Vagrant 1.0 not long ago I think this has potential for a wider adoption of not only local setups\nof one or multiple VMs for development, but also to test more automation of various kinds for developers, testers,\nsysadmins -> DevOps. 
With the release of Vagrant 1.0 not long ago, I think this has the potential for wider adoption: not only for local setups of one or multiple VMs for development, but also for testing more automation of various kinds for developers, testers, sysadmins -> DevOps. On the way to better Continuous Delivery.

# New Homepage with nanoc, Twitter Bootstrap, LESS and Git

*Published 2012-03-06*

With the redesign of our current homepage there was the chance to re-evaluate our requirements and make pragmatic decisions that fill our needs.

Our previous websites were always implemented in OpenCms, since we are an [OpenCms](https://synyx.de/individualsoftware/) solution provider and contributor. Using a CMS like OpenCms seemed the natural decision, and it worked well for several versions of our homepage. But given the amount of work needed to set it up, develop templates and administrate it, was it really what we needed?
A CMS is useful if you have many editors, granular access management with user roles, editor/publisher workflows etc. But we are a team of technically versed people; even our marketing prefers to edit HTML directly instead of relying on a WYSIWYG editor where you don't always know the resulting markup. So OpenCms was a little heavyweight for our homepage, where we didn't need most of its features.

## Finding the right tool for our needs

Analyzing the old homepage showed that 99% of it was static content. The only dynamic parts were blog aggregation and a contact form, and for these two alone it wasn't worth using a CMS or application framework. Yet we still wanted some templating and to generate static HTML.

There are many static HTML generation tools. I'm not going into detail on which ones we looked at, just the solution we came up with. Some evaluation led to [nanoc](http://nanoc.stoneship.org/), which is simple to set up, supports various templating formats, is very flexible, has good documentation and is easy to use.

What about the contact form and blog aggregation? Contact forms that only send an email add no value over a plain email address. Analysis of its usage on the old homepage showed that it attracted mainly spam and very few real messages. Blog aggregation is sufficient every few minutes and doesn't need to be live. So instead of fetching and rendering dynamically, it can be done by a cronjob that regenerates the pages server-side.

## Templating

For templating we use [ERB](https://ruby-doc.org/stdlib/libdoc/erb/rdoc/ERB.html) together with [Twitter Bootstrap](https://getbootstrap.com/) and [LESS](https://lesscss.org) for CSS. nanoc supports precompiling LESS and compressing CSS.

### ERB

ERB is part of Ruby's standard library and quite similar to JSP templating in the Java world. All we need is HTML, some metadata replacement from pages (e.g. a title) and snippet inclusion, for which nanoc provides a rendering helper.

### LESS

LESS enables a more maintainable way of writing CSS, since it extends CSS with dynamic behavior such as variables, mixins, operations and functions. We chose it over SCSS because we also use Twitter Bootstrap. I think in modern CSS styling you should use one of them to make your CSS maintainable and readable. Defining our new CI colors as variables, or mixins for common button stylings, are good uses of these features.

### Twitter Bootstrap

The latest popular layout/CSS toolkit for common things like a grid layout. In my view, the main advantage over older CSS frameworks like [960 Grid System](http://960.gs/) is Twitter Bootstrap's use of LESS, so for example the grid columns can be altered by changing variables.

## Deployment and Publishing

We set up deployment via Git. Basically, a Git post-receive hook calls a shell script that loads the correct Ruby version via rvm and executes a rake task:

```bash
rake deploy:post_receive
```

The rake task then updates dependencies via 'bundle install', fetches blog posts via RSS and tweets via the Twitter API, cleans up old files and compiles the site via nanoc.
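The hook itself can stay small. A sketch of what such a post-receive hook might look like (the paths, branch name and rvm setup are assumptions; the real script is project-specific):

```bash
#!/bin/bash
# hooks/post-receive in the bare repository (illustrative paths)
SITE_DIR=/var/www/homepage

# make the pushed work tree available to the build
GIT_WORK_TREE="$SITE_DIR" git checkout -f master

cd "$SITE_DIR"
source "$HOME/.rvm/scripts/rvm"   # load the Ruby version managed by rvm
rake deploy:post_receive          # bundle install, fetch feeds, compile with nanoc
```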
So adding new content or changing some styling is as easy as doing it locally, committing, and publishing via

```bash
git push stage
```

or

```bash
git push live
```

The advantage of using Git in this way is not just the easy publishing, but also its main feature: versioning. Content changes can be followed, reproduced and rolled back. For example, if you have a marketing campaign and styled your page in X-mas colors, you can do so in a branch, and after New Year you can switch back to the usual styling by switching the branch again.
# Reasons why I go to Devoxx

*Published 2011-11-10*

Yet another year is almost over. One of the reasons I notice this is because [Devoxx](http://devoxx.com/display/DV11/Home) is coming up again. And – of course – Synyx is going to be there. Last year four of Synyx's employees attended the full conference. This year all of last year's visitors are going again, and even three more. So there has to be a reason why it's so popular. This post is about Devoxx and why I personally enjoy going there. Well… there are several reasons…

A big challenge in our business is **staying up to date**. There are plenty of books, articles, tweets and blogs to read in order to know what is going on in the world of software development. And there are even more things to filter out and forget (at least for a while) because they don't apply to you and your daily work. Sometimes you just don't have enough time for this, because you already have loads of work with current technologies in your day-to-day projects.

**The talks at Devoxx keep me in sync with what is going on** and what is (probably) important. It offers pre-selected talks with relevance to Java or to me as a Java developer / architect.

Looking back at last year's Devoxx, many things came to our attention that are in use at Synyx now. Some affected the way we work at Synyx (hello, [Neal Ford](http://www.nealford.com/)), some the tools, libraries and frameworks we are using now (e.g. [Wicket](http://wicket.apache.org/)), and some simply increased our knowledge and brought us up to date (like the "what's new in" JPA, Spring or Java talks). **Go see the stuff you can easily use in your daily work.**

In addition to the interesting topics of the talks, just have a look at the [cast](http://devoxx.com/display/DV11/Devoxxians). There are many well-known **speakers** who have great experience with their topics and also know how to present them to an audience. So it's almost always a pleasure to listen to and learn from them, because they are the best.

Another big thing that makes me really look forward to next week is that I'll spend a **nice week in Antwerp with so many of my co-workers and other friends**.
I'm looking forward to visiting [Kelly's Irish Pub](http://www.kellys.be/) again, which is about 100m down the street from our hotel and was our conference table almost every evening/night last year (and probably will be again this year).

And the last reason I'm going is optimism: as I requested in my [last post about the conference](http://blog.synyx.de/2010/12/devoxx-2010-revisited/), they made the tickets more expensive this year. So I hope they did this because they followed my suggestion to serve **better coffee** this time :). Be sure, Synyx will report about it…

# Schöner schaukeln mit Gradle?

*Published 2011-11-07*

Building high-quality software requires suitable processes and tools. Of essential importance for the quality of the product is the build process: a defined sequence of steps required to turn a set of source files and other resources – taking into account dependencies on libraries and between individual parts of the project – into a working whole. Put simply, that is; on top of it come running a multitude of unit and integration tests (both on developer machines and in continuous integration environments), generating documentation, release management, and so on.
Even for projects of modest size, builds have to be automated (*build automation*). This is the only way to guarantee the correct, error-free order of the steps that make up the build process. Build performance plays a significant role here as well. Agile processes (such as Scrum) with their short release cycles only become feasible through build automation. For most of our projects we currently use the build tool Maven.

Fitting the topic of *build automation*, Hans Dockter presented the build tool [Gradle](http://www.gradle.org/) last week at one of the regular Java User Group Karlsruhe (JUG KA) events: *"Gradle wird den Build schon schaukeln"* ("Gradle will take care of the build") was the hopeful-sounding title of the talk. Hans Dockter is the initiator of the Gradle project and CEO of [Gradleware](http://www.gradleware.com/). In about two and a half hours he gave insights into the concepts and inner workings of Gradle, some usage examples, answers to questions, discussions, and an outlook on features planned for future releases (so far only milestone releases have been published; the current one is Gradle 1.0-milestone-5).

![gradle](https://media.synyx.de/uploads//2011/11/gradle_logo.gif)

That the Gradle train has picked up considerable speed is hard to miss by now: Hibernate, Spring Security and Grails, for example, have switched to Gradle-based builds. The question remains whether it is worth jumping on the train now and adopting Gradle as your build tool. That naturally depends on the project and the requirements of its build, and for existing projects also on the build tool used so far. You wouldn't want to replace a working Maven-based build, would you?

Maven follows a *declarative* approach: you define *what* has to be done in the individual phases of the build, not *how* it is to be done. Comparing this approach with the *imperative* approach of Ant (where every step describes how something is done), the advantage Maven offers over Ant becomes obvious: better readability and maintainability. But the requirements on a build process change over time, so extensibility plays an important role. And flexibility. As far as Maven builds are concerned, flexibility is unfortunately a problem.

Gradle is a declarative build system, too. When describing the build process, the what – not the how – is always in the foreground. Builds are described declaratively with a [Groovy](https://groovy-lang.org/)-based DSL (domain-specific language). The DSL is arbitrarily extensible, though: new *tasks* can be defined in an imperative style and then used as declarative elements in the build script. The imperative aspect guarantees a high degree of flexibility, and the ability to extend the DSL gives a clean separation between the imperative layer and the declarative layer.
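To give an impression of that mix, here is a generic sketch (not from the talk, written against the 1.0-milestone-era Groovy DSL): the declarative part configures the Java build and its dependencies, while a custom task is defined imperatively and then used like any built-in one.

```groovy
// build.gradle – minimal sketch
apply plugin: 'java'

repositories {
    mavenCentral()                      // Gradle can resolve from existing Maven repositories
}

dependencies {
    testCompile 'junit:junit:4.10'      // declarative dependency notation
}

// an imperatively defined task, usable declaratively elsewhere in the build
task buildInfo << {
    println "Building ${project.name}, version ${version}"
}
```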
Builds werden mit einer ",[585,41556,41559],{"href":41557,"rel":41558},"https://groovy-lang.org/",[589],"Groovy","-DSL (Domain Specific\nLanguage) deklarativ beschrieben. Die DSL ist aber beliebig erweiterbar, d.h. daß im imperativen Stil neue ",[573,41562,41563],{},"Tasks","\ndefiniert werden können, welche im Build-Script als deklarative Elemente verwendbar sind. Der imperative Aspekt\ngarantiert hohe Flexibilität. Durch die Möglichkeit die DSL zu erweitern ist eine Trennung von imperativer Schicht und\ndeklarativer Schicht gegeben.",[18,41566,41567],{},"Maven dagegen zwingt dem Build ein starres (Phasen-)Modell auf, was Maven-Builds unflexibel macht. Scheinbar einfache\nAnforderungen wie das Handling von Unit- und Integrationstests werden zu nervigen Angelegenheiten und sind oftmals nur\numständlich zu erreichen. Darunter leidet dann wieder die Les- und Wartbarkeit. Andere Aufgaben lassen sich nur durch\nAnpassungen an Maven-Plugins oder durch Einbindung von Ant-Tasks umsetzen (zwar wird das Ausführen von Groovy Scripts\nunterstützt, aber ein Ausbrechen aus dem starren Phasenmodell von Maven ist auch damit nicht möglich).",[18,41569,41570,41571,41576],{},"Gradle integriert mit Build-Systemen wie Ant und Maven. Das macht Sinn, da die Build-Landschaft heterogen ist. So\nkönnen sowohl Ivy- als auch Maven-Repositories genutzt werden. Die aktive und schnell wachsende Community wird die\nIntegration mit anderen Tools (wie z.B. Entwicklungsumgebungen) weiter vorantreiben. Geplant ist ausserdem ein Portal\nfür Open Source Gradle Plugins. Ideale Voraussetzungen also um den Start mit Gradle zu wagen. Der Einstieg ist nicht\nschwer: der ",[585,41572,41575],{"href":41573,"rel":41574},"http://www.gradle.org/current/docs/userguide/userguide.html",[589],"Gradle User Guide"," ist hier ein guter\nAusgangspunkt.",[18,41578,41579],{},"Gradle ist Open Source und steht unter der Apache Software License Version 2.0",{"title":48,"searchDepth":86,"depth":86,"links":41581},[],[613],"2011-11-07T18:46:28","Die Konstruktion qualitativ hochwertiger Software setzt den Einsatz geeigneter Prozesse und Werkzeuge voraus. Von\\nessentieller Bedeutung hinsichtlich der Qualität des Produkts ist der Build-Prozess: eine definierte Folge von\\nSchritten die erforderlich sind, um aus einer Menge von Sourcecodedateien und sonstiger Ressourcen – sowie unter\\nBerücksichtigung der Abhängigkeiten von Bibliotheken oder zwischen einzelnen Projektteilen – ein funktionierendes Ganzes\\nzu bauen. Vereinfacht gesagt, denn dazu kommt noch die Ausführung einer Vielzahl von Unit- und Integrationstests (\\nsowohl auf den Entwicklermaschinen als auch in Continuous Integration Umgebungen), das Erzeugen von Dokumentation,\\nReleasemanagement, usw.","https://synyx.de/blog/schoener-schaukeln-mit-gradle/",{},"/blog/schoener-schaukeln-mit-gradle",{"title":41485,"description":41494},"schoener-schaukeln-mit-gradle","blog/schoener-schaukeln-mit-gradle",[27213,41592,41593],"build-automation","gradle","Die Konstruktion qualitativ hochwertiger Software setzt den Einsatz geeigneter Prozesse und Werkzeuge voraus. 
# Make software projects fit for Git

*Published 2011-10-25*

More and more projects at our company are taking advantage of distributed and local revision control by using Git. To make a complete software project fit for Git – not just by using git-svn with Subversion underneath and Git on top – a few more steps are required beyond handling files with Git, learning its syntax and understanding the way it works…

![git-logo](https://media.synyx.de/uploads//2011/10/git-logo.png)

**Source code has to be accessible**

We are used to Subversion: a central repository holding the leading state of development. With Git, all repositories are equal. To get the best of both worlds we host a Git repository which is defined to be the leading one (by convention only). We like to have things under control, so we use [gitosis](http://eagain.net/gitweb/?p=gitosis.git) to serve these repositories, but we are thinking about switching to [gitolite](https://github.com/sitaramc/gitolite/wiki) because of its better and easier access management. You can also host at GitHub; they do great work and it's their daily business.

What else do we need to develop an amazing piece of software, in addition to good code and a working methodology? Which tools assist the development process and need to be able to handle Git repositories?
![chiliproject-logo](https://media.synyx.de/uploads//2011/10/chiliproject-logo.png)

**Software should do what it is intended for**

To reach this goal we collect requirements and break the subsequent work down into manageable packages with a **project-management tool** called Redmine, more precisely [ChiliProject](https://www.chiliproject.org/), a rapidly evolving fork of Redmine.

![jenkins_logo](https://media.synyx.de/uploads//2011/10/jenkins_logo.png)

**Software is something executable**

Plain source code is for documentation purposes 😉

We have to build it. Most of our projects are written in Java and built with Apache Maven. To build the code automatically and pursue **continuous integration** we use Jenkins, an Oracle-independent fork of Hudson (yes, we like forks, especially when the main developers of the original project switch to the new one; if you are interested in all of our reasons, read the [blog post](http://blog.synyx.de/2011/05/opensource-is-not-just-about-the-license/) written by [Fabian Buch](http://blog.synyx.de/autoren/?uid=14)).

**So the first mission is to make ChiliProject fit for Git.**

Luckily, ChiliProject can handle Git repositories out of the box, but the repositories have to be cloned to localhost (the machine running ChiliProject). The ChiliProject user needs read access to them; in our case that means generating a passwordless SSH key pair, deploying the public part of it to gitosis and explicitly granting rights to this public key.
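Generating and deploying such a key might look roughly like this (a sketch; the key file name and comment are placeholders, and the gitosis admin workflow depends on your setup):

```bash
# as the user that runs ChiliProject: create a passwordless key pair
ssh-keygen -t rsa -N "" -C "chiliproject@host" -f ~/.ssh/git_key.priv

# the public half (~/.ssh/git_key.priv.pub) is then added to the gitosis-admin
# repository (keydir/ plus gitosis.conf) and granted read access to the repositories
```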
To make Git (over SSH) use the generated private key every time, we modify the file ~/.ssh/config:

```
Host git.domain.tld
User git
IdentityFile ~/.ssh/git_key.priv
```

Now we need to clone the repository manually by logging in to the machine running ChiliProject and doing:

```bash
mkdir /path/to/git/repositories/
cd /path/to/git/repositories/
git clone git@gitosis.domain.tld:gitrepo.git
```

Of course, Git has to be installed on the server running ChiliProject, and the repository has to exist already…

But how do we keep this cloned repository up to date? We solved this by installing a cronjob, running as the ChiliProject user, every 5 minutes:

```bash
*/5 * * * * for i in /path/to/git/repositories/*/; do cd $i && git fetch; git reset refs/remotes/origin/master; done >>/dev/null 2>&1
```

Yes, you could log to a file instead of /dev/null, but we trust… 🙂

That's it. You can now add the local repository to your project in ChiliProject; just give the full path including the ".git" folder, as ChiliProject is a little fussy on this point.
ChiliProject, install the git-binary on the system running Jenkins,\ngenerate ssh-keypair, give read-rights to the user(possible stumbling block: we are running jenkins in a\njava-servlet-container so it´s the user running this container!)",[18,41867,41868],{},"modify ~/.ssh/config. Now we should be able to manually clone the targeted repositories, but that´s not what we want (\nremember automatically and continous integration?)",[18,41870,41871],{},"In order to be able to tag the built release version, the user running jenkins needs to give an author to the\ngit-repository, so modify/create ~/.gitconfig of this user:",[43,41873,41875],{"className":45,"code":41874,"language":47,"meta":48,"style":48},"[user]\n name = \"Jenkins\"\n email = \"jenkins@jenkins.domain.tld\"\n",[50,41876,41877,41882,41892],{"__ignoreMap":48},[53,41878,41879],{"class":55,"line":56},[53,41880,41881],{"class":82},"[user]\n",[53,41883,41884,41887,41889],{"class":55,"line":86},[53,41885,41886],{"class":59}," name",[53,41888,1245],{"class":63},[53,41890,41891],{"class":63}," \"Jenkins\"\n",[53,41893,41894,41897,41899],{"class":55,"line":126},[53,41895,41896],{"class":59}," email",[53,41898,1245],{"class":63},[53,41900,41901],{"class":63}," \"jenkins@jenkins.domain.tld\"\n",[18,41903,41904],{},"Jenkins is not able to handle git by default, we have to install a plugin: login -> Jenkins -> manage Jenkins ->\nmanage plugins -> available -> Git plugin (that´s easy to remember)",[18,41906,41907],{},"After restarting Jenkins you´ll find, under “projectX”/configure -> Source Code Management, the new section git where\nyou can insert the url of your repository -> save",[18,41909,41910],{},"Finally you can build the project(small prayer could help 😉 ) and enjoy the built software and Jenkins´ expandable\nworkflow…",[607,41912,41913],{},"html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}html pre.shiki code .szBVR, html code.shiki .szBVR{--shiki-default:#D73A49;--shiki-dark:#F97583}html pre.shiki code .sVt8B, html code.shiki .sVt8B{--shiki-default:#24292E;--shiki-dark:#E1E4E8}",{"title":48,"searchDepth":86,"depth":86,"links":41915},[],[6869,613,996],"2011-10-25T12:34:45","More and more Projects at our company are taking advantage of distributed and local revision control by using git. 
So to\\nmake a complete software-project fit for git, by not only using git-svn with subversion and git on top, some more\\nsteps are required than just handling files with git, learning its syntax and understanding the way it works…","https://synyx.de/blog/make-software-projects-fit-for-git/",{},"/blog/make-software-projects-fit-for-git",{"title":41598,"description":41608},"blog/make-software-projects-fit-for-git",[41925,290,10891,41377,24907,6884],"apache","More and more Projects at our company are taking advantage of distributed and local revision control by using git. So to make a complete software-project fit for git, by not…","vCNC4CoaSUI5ng_rt7Nu-7xypFBFlUaDbA6FV5gZLNA",{"id":41929,"title":41930,"author":41931,"body":41933,"category":42549,"date":42550,"description":42551,"extension":617,"link":42552,"meta":42553,"navigation":499,"path":42554,"seo":42555,"slug":41937,"stem":42556,"tags":42557,"teaser":42558,"__hash__":42559},"blog/blog/sending-jms-from-oracledb-to-external-activemq-broker.md","Sending JMS from OracleDB to external ActiveMQ Broker",[41932],"menz",{"type":11,"value":41934,"toc":42543},[41935,41938,41941,41944,41948,41955,41958,41969,41973,41976,41979,41996,41999,42019,42028,42032,42035,42147,42150,42243,42246,42355,42358,42410,42414,42417,42425,42428,42431,42465,42494,42504,42523,42526,42540],[14,41936,41930],{"id":41937},"sending-jms-from-oracledb-to-external-activemq-broker",[18,41939,41940],{},"After taking over a legacy application of which a huge part of the business logic is formed by triggers and procedures\ninside an Oracle DB, we faced the task of a step-by-step migration of that logic to Java code. Due to the complete\nlack of a defined and sophisticated service layer and having other systems connected using several autonomous interfaces\nwhich directly access the underlying database this migration is quite complicated.",[18,41942,41943],{},"An idea popped up. As a intermediate step on the way to move the business logic back to the Java side, why not port the\ntrigger PL/SQL to Java (behind a new created service layer) and let the trigger fire JMS messages to trigger the\nexecution of that Java code?",[2207,41945,41947],{"id":41946},"prerequisites","Prerequisites",[18,41949,41950,41951,986],{},"Since Oracle 9i it is possible to use Java code in the Oracle DB. However the provided (not replaceable) JDK lacks\nbehind the current standards. On top of that there are some specifics to be considered. 
Details on that topic can be\nfound ",[585,41952,10819],{"href":41953,"rel":41954},"http://download.oracle.com/docs/cd/B28359_01/java.111/b31225/chtwo.htm",[589],[18,41956,41957],{},"Our setup consists of the following components:",[577,41959,41960,41963,41966],{},[580,41961,41962],{},"Oracle 11gR2 (providing JDK 1.5)",[580,41964,41965],{},"Active MQ 5.4.2 (the last version that can be build using JDK 1.5)",[580,41967,41968],{},"Spring 3.0.5",[2207,41970,41972],{"id":41971},"preparation-of-user-privileges","Preparation of user privileges",[18,41974,41975],{},"The following privileges must be granted to the database user (in our demonstration: TEST) from which the sending of JMS\nmessages shall occur.",[18,41977,41978],{},"Direct access to the class loader and system properties:",[43,41980,41984],{"className":41981,"code":41982,"language":41983,"meta":48,"style":48},"language-sql shiki shiki-themes github-light github-dark","call dbms_java.grant_permission( 'TEST', 'SYS:java.lang.RuntimePermission', 'getClassLoader', '' );\ncall dbms_java.grant_permission( 'TEST', 'SYS:java.util.PropertyPermission', '*', 'read,write' );\n","sql",[50,41985,41986,41991],{"__ignoreMap":48},[53,41987,41988],{"class":55,"line":56},[53,41989,41990],{},"call dbms_java.grant_permission( 'TEST', 'SYS:java.lang.RuntimePermission', 'getClassLoader', '' );\n",[53,41992,41993],{"class":55,"line":86},[53,41994,41995],{},"call dbms_java.grant_permission( 'TEST', 'SYS:java.util.PropertyPermission', '*', 'read,write' );\n",[18,41997,41998],{},"Creating a socket connection and full access to it:",[43,42000,42002],{"className":41981,"code":42001,"language":41983,"meta":48,"style":48},"call dbms_java.grant_permission( 'TEST', 'SYS:java.net.SocketPermission', '*', 'listen,resolve');\ncall dbms_java.grant_permission( 'TEST', 'SYS:java.net.SocketPermission', '*', 'accept,resolve');\ncall dbms_java.grant_permission( 'TEST', 'SYS:java.net.SocketPermission', '*', 'connect,resolve');\n",[50,42003,42004,42009,42014],{"__ignoreMap":48},[53,42005,42006],{"class":55,"line":56},[53,42007,42008],{},"call dbms_java.grant_permission( 'TEST', 'SYS:java.net.SocketPermission', '*', 'listen,resolve');\n",[53,42010,42011],{"class":55,"line":86},[53,42012,42013],{},"call dbms_java.grant_permission( 'TEST', 'SYS:java.net.SocketPermission', '*', 'accept,resolve');\n",[53,42015,42016],{"class":55,"line":126},[53,42017,42018],{},"call dbms_java.grant_permission( 'TEST', 'SYS:java.net.SocketPermission', '*', 'connect,resolve');\n",[18,42020,42021,42023,42024,42027],{},[27,42022,8922],{}," The placeholder ",[50,42025,42026],{},"'*'"," allows the creation of socket connections to any target host and port. In production\nsystems that shall be reduced to the IP range needed.",[2207,42029,42031],{"id":42030},"a-small-prototype","A small prototype",[18,42033,42034],{},"For demonstration purposes we use Spring to fire up an ActiveMQ broker, initialize a test queue and add a simple\nlistener. 
Here’s the corresponding part of the application context configuration.",[43,42036,42038],{"className":3792,"code":42037,"language":3794,"meta":48,"style":48},"\n\u003C!-- create message broker - instead of using a somewhere centralized activemq server -->\n\u003Cbean id=\"broker\" class=\"org.apache.activemq.xbean.BrokerFactoryBean\">\n \u003Cproperty name=\"config\" value=\"classpath:META-INF/activemq.xml\" />\n \u003Cproperty name=\"start\" value=\"true\" />\n\u003C/bean>\n\u003C!-- a simple test queue - the queue name is specified by the bean id -->\n\u003Cbean id=\"testQueue\" class=\"org.apache.activemq.command.ActiveMQQueue\" />\n\u003C!-- JMS connection factory (wrapped into a pooling connection factory) -->\n\u003Cbean id=\"jmsFactory\" class=\"org.apache.activemq.pool.PooledConnectionFactory\" destroy-method=\"stop\">\n \u003Cproperty name=\"connectionFactory\">\n \u003Cbean class=\"org.apache.activemq.ActiveMQConnectionFactory\">\n \u003Cproperty name=\"brokerURL\" value=\"tcp://localhost:61616\" />\n \u003C/bean>\n \u003C/property>\n\u003C/bean>\n\u003C!-- simple message listener just logging received message to stdout -->\n\u003Cbean id=\"simpleListener\" class=\"com.synyx.prototype.jms.SimpleMessageListener\" />\n\u003C!-- listener container delegating to listener instances and wiring them to their destinations -->\n\u003Cjms:listener-container concurrency=\"10\" connection-factory=\"jmsFactory\">\n \u003Cjms:listener id=\"queueListener\" destination=\"testQueue\" ref=\"simpleListener\" />\n\u003C/jms:listener-container>\n\n",[50,42039,42040,42044,42049,42054,42059,42064,42068,42073,42078,42083,42088,42093,42098,42103,42108,42113,42117,42122,42127,42132,42137,42142],{"__ignoreMap":48},[53,42041,42042],{"class":55,"line":56},[53,42043,500],{"emptyLinePlaceholder":499},[53,42045,42046],{"class":55,"line":86},[53,42047,42048],{},"\u003C!-- create message broker - instead of using a somewhere centralized activemq server -->\n",[53,42050,42051],{"class":55,"line":126},[53,42052,42053],{},"\u003Cbean id=\"broker\" class=\"org.apache.activemq.xbean.BrokerFactoryBean\">\n",[53,42055,42056],{"class":55,"line":163},[53,42057,42058],{}," \u003Cproperty name=\"config\" value=\"classpath:META-INF/activemq.xml\" />\n",[53,42060,42061],{"class":55,"line":186},[53,42062,42063],{}," \u003Cproperty name=\"start\" value=\"true\" />\n",[53,42065,42066],{"class":55,"line":221},[53,42067,40519],{},[53,42069,42070],{"class":55,"line":242},[53,42071,42072],{},"\u003C!-- a simple test queue - the queue name is specified by the bean id -->\n",[53,42074,42075],{"class":55,"line":273},[53,42076,42077],{},"\u003Cbean id=\"testQueue\" class=\"org.apache.activemq.command.ActiveMQQueue\" />\n",[53,42079,42080],{"class":55,"line":279},[53,42081,42082],{},"\u003C!-- JMS connection factory (wrapped into a pooling connection factory) -->\n",[53,42084,42085],{"class":55,"line":496},[53,42086,42087],{},"\u003Cbean id=\"jmsFactory\" class=\"org.apache.activemq.pool.PooledConnectionFactory\" destroy-method=\"stop\">\n",[53,42089,42090],{"class":55,"line":503},[53,42091,42092],{}," \u003Cproperty name=\"connectionFactory\">\n",[53,42094,42095],{"class":55,"line":509},[53,42096,42097],{}," \u003Cbean class=\"org.apache.activemq.ActiveMQConnectionFactory\">\n",[53,42099,42100],{"class":55,"line":515},[53,42101,42102],{}," \u003Cproperty name=\"brokerURL\" value=\"tcp://localhost:61616\" />\n",[53,42104,42105],{"class":55,"line":521},[53,42106,42107],{}," 
\u003C/bean>\n",[53,42109,42110],{"class":55,"line":527},[53,42111,42112],{}," \u003C/property>\n",[53,42114,42115],{"class":55,"line":533},[53,42116,40519],{},[53,42118,42119],{"class":55,"line":539},[53,42120,42121],{},"\u003C!-- simple message listener just logging received message to stdout -->\n",[53,42123,42124],{"class":55,"line":545},[53,42125,42126],{},"\u003Cbean id=\"simpleListener\" class=\"com.synyx.prototype.jms.SimpleMessageListener\" />\n",[53,42128,42129],{"class":55,"line":2414},[53,42130,42131],{},"\u003C!-- listener container delegating to listener instances and wiring them to their destinations -->\n",[53,42133,42134],{"class":55,"line":2426},[53,42135,42136],{},"\u003Cjms:listener-container concurrency=\"10\" connection-factory=\"jmsFactory\">\n",[53,42138,42139],{"class":55,"line":2438},[53,42140,42141],{}," \u003Cjms:listener id=\"queueListener\" destination=\"testQueue\" ref=\"simpleListener\" />\n",[53,42143,42144],{"class":55,"line":2451},[53,42145,42146],{},"\u003C/jms:listener-container>\n",[18,42148,42149],{},"On the side of the message producer a QueueConnectionFactory implementation provides the connectivity to the ActiveMQ\nbroker. For our prototype it lacks any authentication mechanisms.",[43,42151,42153],{"className":288,"code":42152,"language":290,"meta":48,"style":48},"\npublic class ActiveMQQueueConnectionFactory implements QueueConnectionFactory {\n private ConnectionFactory connectionFactory = null;\n public ActiveMQQueueConnectionFactory(String brokerUrl) {\n this.connectionFactory = new ActiveMQConnectionFactory(brokerUrl);\n }\n public QueueConnection createQueueConnection() throws JMSException {\n return (QueueConnection) createConnection();\n }\n public QueueConnection createQueueConnection(String username, String password) throws JMSException {\n return createQueueConnection();\n }\n public Connection createConnection() throws JMSException {\n return this.connectionFactory.createConnection();\n }\n public Connection createConnection(String username, String password) throws JMSException {\n return createConnection();\n }\n}\n\n",[50,42154,42155,42159,42164,42169,42174,42179,42183,42188,42193,42197,42202,42207,42211,42216,42221,42225,42230,42235,42239],{"__ignoreMap":48},[53,42156,42157],{"class":55,"line":56},[53,42158,500],{"emptyLinePlaceholder":499},[53,42160,42161],{"class":55,"line":86},[53,42162,42163],{},"public class ActiveMQQueueConnectionFactory implements QueueConnectionFactory {\n",[53,42165,42166],{"class":55,"line":126},[53,42167,42168],{}," private ConnectionFactory connectionFactory = null;\n",[53,42170,42171],{"class":55,"line":163},[53,42172,42173],{}," public ActiveMQQueueConnectionFactory(String brokerUrl) {\n",[53,42175,42176],{"class":55,"line":186},[53,42177,42178],{}," this.connectionFactory = new ActiveMQConnectionFactory(brokerUrl);\n",[53,42180,42181],{"class":55,"line":221},[53,42182,860],{},[53,42184,42185],{"class":55,"line":242},[53,42186,42187],{}," public QueueConnection createQueueConnection() throws JMSException {\n",[53,42189,42190],{"class":55,"line":273},[53,42191,42192],{}," return (QueueConnection) createConnection();\n",[53,42194,42195],{"class":55,"line":279},[53,42196,860],{},[53,42198,42199],{"class":55,"line":496},[53,42200,42201],{}," public QueueConnection createQueueConnection(String username, String password) throws JMSException {\n",[53,42203,42204],{"class":55,"line":503},[53,42205,42206],{}," return 
createQueueConnection();\n",[53,42208,42209],{"class":55,"line":509},[53,42210,860],{},[53,42212,42213],{"class":55,"line":515},[53,42214,42215],{}," public Connection createConnection() throws JMSException {\n",[53,42217,42218],{"class":55,"line":521},[53,42219,42220],{}," return this.connectionFactory.createConnection();\n",[53,42222,42223],{"class":55,"line":527},[53,42224,860],{},[53,42226,42227],{"class":55,"line":533},[53,42228,42229],{}," public Connection createConnection(String username, String password) throws JMSException {\n",[53,42231,42232],{"class":55,"line":539},[53,42233,42234],{}," return createConnection();\n",[53,42236,42237],{"class":55,"line":545},[53,42238,860],{},[53,42240,42241],{"class":55,"line":2414},[53,42242,282],{},[18,42244,42245],{},"The class QueueMessageSender implements a simple text message producer. Note that the JMSException isn’t caught but\npropageted to the caller. Exceptions are finally handled by the global Oracle VM Exception Handler. That way, in case of\nan exception, the exceptions message ends up in the ORA-XXXX error designation and the full stack trace is stated in\nthe user’s session log.",[43,42247,42249],{"className":288,"code":42248,"language":290,"meta":48,"style":48},"\npublic class QueueMessageSender {\n private QueueConnectionFactory connectionFactory = null;\n public QueueMessageSender(QueueConnectionFactory connectionFactory) {\n this.connectionFactory = connectionFactory;\n }\n public void sendMessage(String destination, String message) throws JMSException {\n QueueConnection connection = null;\n try {\n connection = this.connectionFactory.createQueueConnection();\n QueueSession session = connection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);\n Queue queue = session.createQueue(destination);\n QueueSender sender = session.createSender(queue);\n TextMessage textMessage = session.createTextMessage(message);\n sender.send(textMessage);\n } finally {\n if (null != connection) {\n connection.close();\n }\n }\n }\n}\n\n",[50,42250,42251,42255,42260,42265,42270,42275,42279,42284,42289,42294,42299,42304,42309,42314,42319,42324,42329,42334,42339,42343,42347,42351],{"__ignoreMap":48},[53,42252,42253],{"class":55,"line":56},[53,42254,500],{"emptyLinePlaceholder":499},[53,42256,42257],{"class":55,"line":86},[53,42258,42259],{},"public class QueueMessageSender {\n",[53,42261,42262],{"class":55,"line":126},[53,42263,42264],{}," private QueueConnectionFactory connectionFactory = null;\n",[53,42266,42267],{"class":55,"line":163},[53,42268,42269],{}," public QueueMessageSender(QueueConnectionFactory connectionFactory) {\n",[53,42271,42272],{"class":55,"line":186},[53,42273,42274],{}," this.connectionFactory = connectionFactory;\n",[53,42276,42277],{"class":55,"line":221},[53,42278,860],{},[53,42280,42281],{"class":55,"line":242},[53,42282,42283],{}," public void sendMessage(String destination, String message) throws JMSException {\n",[53,42285,42286],{"class":55,"line":273},[53,42287,42288],{}," QueueConnection connection = null;\n",[53,42290,42291],{"class":55,"line":279},[53,42292,42293],{}," try {\n",[53,42295,42296],{"class":55,"line":496},[53,42297,42298],{}," connection = this.connectionFactory.createQueueConnection();\n",[53,42300,42301],{"class":55,"line":503},[53,42302,42303],{}," QueueSession session = connection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);\n",[53,42305,42306],{"class":55,"line":509},[53,42307,42308],{}," Queue queue = 
session.createQueue(destination);\n",[53,42310,42311],{"class":55,"line":515},[53,42312,42313],{}," QueueSender sender = session.createSender(queue);\n",[53,42315,42316],{"class":55,"line":521},[53,42317,42318],{}," TextMessage textMessage = session.createTextMessage(message);\n",[53,42320,42321],{"class":55,"line":527},[53,42322,42323],{}," sender.send(textMessage);\n",[53,42325,42326],{"class":55,"line":533},[53,42327,42328],{}," } finally {\n",[53,42330,42331],{"class":55,"line":539},[53,42332,42333],{}," if (null != connection) {\n",[53,42335,42336],{"class":55,"line":545},[53,42337,42338],{}," connection.close();\n",[53,42340,42341],{"class":55,"line":2414},[53,42342,20673],{},[53,42344,42345],{"class":55,"line":2426},[53,42346,3242],{},[53,42348,42349],{"class":55,"line":2438},[53,42350,860],{},[53,42352,42353],{"class":55,"line":2451},[53,42354,282],{},[18,42356,42357],{},"The following class provides the entry point to be called from PL/SQL (i.e. sending a message triggering the service\ncall replacing the legacy trigger code). For demonstration purposes it is kept simple like the rest of the code\nexamples. Note that the method acting as entry point must be static for beeing callable from PL/SQL.",[43,42359,42361],{"className":288,"code":42360,"language":290,"meta":48,"style":48},"\npublic class JMSFromOracleTest {\n private static final String BROKER_URL = \"tcp://192.168.x.x:61616\";\n private static final String QUEUE_NAME = \"testQueue\";\n public static void sendMessage(String message) throws JMSException {\n QueueConnectionFactory connectionFactory = new ActiveMQQueueConnectionFactory(BROKER_URL);\n QueueMessageSender sender = new QueueMessageSender(connectionFactory);\n sender.sendMessage(QUEUE_NAME, message);\n }\n}\n\n",[50,42362,42363,42367,42372,42377,42382,42387,42392,42397,42402,42406],{"__ignoreMap":48},[53,42364,42365],{"class":55,"line":56},[53,42366,500],{"emptyLinePlaceholder":499},[53,42368,42369],{"class":55,"line":86},[53,42370,42371],{},"public class JMSFromOracleTest {\n",[53,42373,42374],{"class":55,"line":126},[53,42375,42376],{}," private static final String BROKER_URL = \"tcp://192.168.x.x:61616\";\n",[53,42378,42379],{"class":55,"line":163},[53,42380,42381],{}," private static final String QUEUE_NAME = \"testQueue\";\n",[53,42383,42384],{"class":55,"line":186},[53,42385,42386],{}," public static void sendMessage(String message) throws JMSException {\n",[53,42388,42389],{"class":55,"line":221},[53,42390,42391],{}," QueueConnectionFactory connectionFactory = new ActiveMQQueueConnectionFactory(BROKER_URL);\n",[53,42393,42394],{"class":55,"line":242},[53,42395,42396],{}," QueueMessageSender sender = new QueueMessageSender(connectionFactory);\n",[53,42398,42399],{"class":55,"line":273},[53,42400,42401],{}," sender.sendMessage(QUEUE_NAME, message);\n",[53,42403,42404],{"class":55,"line":279},[53,42405,860],{},[53,42407,42408],{"class":55,"line":496},[53,42409,282],{},[2207,42411,42413],{"id":42412},"packaging-and-deployment-to-the-oracle-db","Packaging and Deployment to the Oracle DB",[18,42415,42416],{},"To simplify the deployment we created a small Maven project covering the producer code and used the Maven Assembly\nplugin for packaging the producer classes and all dependencies into a single JAR file. 
Again, to keep things simple we\nadded the activemq-all distribution as the only dependency.",[18,42418,42419,42421,42422,42424],{},[27,42420,8922],{}," All classes (the producer classes and ",[573,42423,19024],{}," dependencies) need to be compiled using/for JDK 1.5 (class\nversion \u003C= 49.0).",[18,42426,42427],{},"Oracle keeps all Java class files and resources in the database. As the name implies, the command line tool “loadjava”\nis used to load Java resources into the db. This command must be issued on the Oracle DB server itself. For that, the\nenvironment variable ORACLE_HOME must be correctly set. In reverse, the tool “dropjava” provides an easy way to unload\nJava resources from the DB.",[18,42429,42430],{},"Issuing the following command loads all resources contained in our JAR file into the Oracle DB.",[43,42432,42434],{"className":45,"code":42433,"language":47,"meta":48,"style":48},"\nloadjava -v -r -u test/12345 -resolver \"((* TEST) (* PUBLIC) (* -))\" JMSSender-1.0-SNAPSHOT-jar-with-dependencies.jar\n\n",[50,42435,42436,42440],{"__ignoreMap":48},[53,42437,42438],{"class":55,"line":56},[53,42439,500],{"emptyLinePlaceholder":499},[53,42441,42442,42445,42447,42450,42453,42456,42459,42462],{"class":55,"line":86},[53,42443,42444],{"class":59},"loadjava",[53,42446,8667],{"class":89},[53,42448,42449],{"class":89}," -r",[53,42451,42452],{"class":89}," -u",[53,42454,42455],{"class":63}," test/12345",[53,42457,42458],{"class":89}," -resolver",[53,42460,42461],{"class":63}," \"((* TEST) (* PUBLIC) (* -))\"",[53,42463,42464],{"class":63}," JMSSender-1.0-SNAPSHOT-jar-with-dependencies.jar\n",[18,42466,42467,42468,42471,42472,42475,42476,42478,42479,42482,42483,11792,42486,42489,42490,42493],{},"The switch ",[50,42469,42470],{},"-r"," enables the resolving of all classes references by the loaded classes. If any reference made by a class\ncould not be resolved, that class is marked ",[50,42473,42474],{},"INVALID",". Classes referencing invalid classes are also marked ",[50,42477,42474],{},".\nNote that we declared our own resolver using the ",[50,42480,42481],{},"-resolver"," parameter. Using the parameter as stated above all classes\nin any package declared in the SCHEMA ",[50,42484,42485],{},"TEST",[50,42487,42488],{},"PUBLIC"," are allowed to reference unresolved dependencies. That way\nfeatures and connectors of ActiveMQ not used (and therefore lack the required dependencies) do not invalidate the core\nclasses. The parameter ",[50,42491,42492],{},"-u"," is followed by the user credentials (username/password) of the user to whose default schema\nthe resources are deployed.",[18,42495,42496,42497,42500,42501,986],{},"After all resources contained in the JAR file are deployed (this may take a while – but keeps you entertained because of\nthe ",[50,42498,42499],{},"-v"," parameter), we need to create a stored procedure usable from PL/SQL that directs the call to the entry point\nmethod of our Java implementation. For general information on calling Java Methods from PL/SQL (i.e. 
referencing\nparameters and return values) see ",[585,42502,10819],{"href":41953,"rel":42503},[589],[43,42505,42507],{"className":41981,"code":42506,"language":41983,"meta":48,"style":48},"\nCREATE OR REPLACE PROCEDURE sendJmsMessage(message IN VARCHAR2)\nAS LANGUAGE JAVA NAME 'com.synyx.prototype.jms.JMSFromOracleTest.sendMessage(java.lang.String)';\n\n",[50,42508,42509,42513,42518],{"__ignoreMap":48},[53,42510,42511],{"class":55,"line":56},[53,42512,500],{"emptyLinePlaceholder":499},[53,42514,42515],{"class":55,"line":86},[53,42516,42517],{},"CREATE OR REPLACE PROCEDURE sendJmsMessage(message IN VARCHAR2)\n",[53,42519,42520],{"class":55,"line":126},[53,42521,42522],{},"AS LANGUAGE JAVA NAME 'com.synyx.prototype.jms.JMSFromOracleTest.sendMessage(java.lang.String)';\n",[18,42524,42525],{},"Finally, we are done. Calling the procedure from PL/SQL will invoke our Java method and a text message containing the\ngiven text will be posted.",[43,42527,42529],{"className":41981,"code":42528,"language":41983,"meta":48,"style":48},"\ncall sendjmsmessage('hello from oracle');\n\n",[50,42530,42531,42535],{"__ignoreMap":48},[53,42532,42533],{"class":55,"line":56},[53,42534,500],{"emptyLinePlaceholder":499},[53,42536,42537],{"class":55,"line":86},[53,42538,42539],{},"call sendjmsmessage('hello from oracle');\n",[607,42541,42542],{},"html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}",{"title":48,"searchDepth":86,"depth":86,"links":42544},[42545,42546,42547,42548],{"id":41946,"depth":86,"text":41947},{"id":41971,"depth":86,"text":41972},{"id":42030,"depth":86,"text":42031},{"id":42412,"depth":86,"text":42413},[613],"2011-10-18T17:37:19","After taking over a legacy application of which a huge part of the business logic is formed by triggers and procedures\\ninside an Oracle DB, we faced the task of a step-by-step migration of that logic to Java code. 
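The Spring configuration above wires a `com.synyx.prototype.jms.SimpleMessageListener` to the test queue, but its source is not shown in the post. Below is a minimal sketch of what such a listener might look like; only the class name and its purpose ("just logging received messages to stdout") come from the configuration comments, the implementation itself is an assumption:

```java
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;

/**
 * Hypothetical implementation of the bean registered as "simpleListener":
 * it only prints received text messages to stdout.
 */
public class SimpleMessageListener implements MessageListener {

    public void onMessage(Message message) {
        try {
            if (message instanceof TextMessage) {
                // the producer side only sends TextMessages
                System.out.println("Received: " + ((TextMessage) message).getText());
            } else {
                System.out.println("Received non-text message: " + message);
            }
        } catch (JMSException e) {
            // let the Spring listener container deal with the failure
            throw new RuntimeException("Could not read JMS message", e);
        }
    }
}
```

With the `jms:listener-container` from the configuration, Spring invokes `onMessage()` for every message arriving on `testQueue`, so the text sent from PL/SQL ends up on stdout of the host running the broker.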
Due to the complete\\nlack of a defined and sophisticated service layer and having other systems connected using several autonomous interfaces\\nwhich directly access the underlying database this migration is quite complicated.","https://synyx.de/blog/sending-jms-from-oracledb-to-external-activemq-broker/",{},"/blog/sending-jms-from-oracledb-to-external-activemq-broker",{"title":41930,"description":41940},"blog/sending-jms-from-oracledb-to-external-activemq-broker",[],"After taking over a legacy application of which a huge part of the business logic is formed by triggers and procedures inside an Oracle DB, we faced the task of…","8b5tf03Ms7jGKzpnvc9ei6GFG6iPwFbrgDPlVaOGXeQ",{"id":42561,"title":42562,"author":42563,"body":42565,"category":42894,"date":42895,"description":42896,"extension":617,"link":42897,"meta":42898,"navigation":499,"path":42899,"seo":42900,"slug":42902,"stem":42903,"tags":42904,"teaser":42905,"__hash__":42906},"blog/blog/testing-webapp-startup-on-jenkins-with-maven-tomcat-and-web-driver.md","Testing webapp startup on Jenkins using Maven, Tomcat and Web Driver",[42564],"hopf",{"type":11,"value":42566,"toc":42892},[42567,42570,42588,42591,42597,42600,42603,42617,42649,42652,42792,42795,42887,42890],[14,42568,42562],{"id":42569},"testing-webapp-startup-on-jenkins-using-maven-tomcat-and-web-driver",[18,42571,42572,42573,42578,42579,42582,42583,986],{},"Modern web applications often consist of quite some configuration files that should at least be tested for validity.\nThink\nof ",[585,42574,42577],{"href":42575,"rel":42576},"http://static.springsource.org/spring/docs/3.0.6.RELEASE/spring-framework-reference/html/mvc.html",[589],"Spring controller configurations",",\nweb application descriptors and the like that can’t be tested easily using Unit Tests. Fortunately it’s quite easy to\nstart a tomcat instance on your CI system (",[585,42580,35246],{"href":35244,"rel":42581},[589]," or Hudson) using\nthe ",[585,42584,42587],{"href":42585,"rel":42586},"https://web.archive.org/web/20150531090420/http://mojo.codehaus.org/tomcat-maven-plugin/",[589],"Tomcat Maven Plugin",[18,42589,42590],{},"As you probably don’t want to start and stop the server on every test run it’s a good idea to bind it to the\nintegration-test phase, probably even to a separate profile that is only triggered on the continuos integration\nmachine. This is what the plugin configuration might look like:",[43,42592,42595],{"className":42593,"code":42594,"language":6663},[6662],"\n\u003Cplugin>\n \u003CgroupId>org.codehaus.mojo\u003C/groupId>\n \u003CartifactId>tomcat-maven-plugin\u003C/artifactId>\n \u003Cversion>1.1\u003C/version>\n \u003Cconfiguration>\n \u003Cfork>true\u003C/fork>\n \u003Cport>8081\u003C/port>\n \u003C/configuration>\n \u003Cexecutions>\n \u003Cexecution>\n \u003Cid>start-tc\u003C/id>\n \u003Cphase>pre-integration-test\u003C/phase>\n \u003Cgoals>\n \u003Cgoal>run-war-only\u003C/goal>\n \u003C/goals>\n \u003C/execution>\n \u003Cexecution>\n \u003Cid>stop-tc\u003C/id>\n \u003Cphase>post-integration-test\u003C/phase>\n \u003Cgoals>\n \u003Cgoal>shutdown\u003C/goal>\n \u003C/goals>\n \u003C/execution>\n \u003C/executions>\n\u003C/plugin>\n",[50,42596,42594],{"__ignoreMap":48},[18,42598,42599],{},"We bind the startup to the pre-integration-test phase, which is triggered just before running the integration tests.\nIn post-integration-test we shutdown the server. 
When running on a CI webapp it’s important to choose a different port\nthan the one that is used for the CI Server as startup will fail when the port is already in use. We are forking to\ncontinue with the Maven execution, if you skip this parameter Maven will just stop after it started Tomcat.",[18,42601,42602],{},"If you run the plugin with a failing web configuration (e.g. when you Spring web context breaks) the failures will be\nlogged to the console output. Unfortunately this doesn’t break the build and you won’t notice that there might be a\nproblem.",[18,42604,42605,42606,42611,42612,42616],{},"One way to have the build fail is to add a Test Case that issues a HTTP call to you web application. A good tool for\ndoing this is the ",[585,42607,42610],{"href":42608,"rel":42609},"http://seleniumhq.org/projects/webdriver/",[589],"Web Driver project"," which merged\nwith ",[585,42613,42615],{"href":35673,"rel":42614},[589],"Selenium"," in Selenium 2.0. Add the dependency to your build:",[43,42618,42620],{"className":3792,"code":42619,"language":3794,"meta":48,"style":48},"\u003Cdependency>\n \u003CgroupId>org.seleniumhq.selenium\u003C/groupId>\n \u003CartifactId>selenium-htmlunit-driver\u003C/artifactId>\n \u003Cversion>2.3.1\u003C/version>\n \u003Cscope>test\u003C/scope>\n\u003C/dependency>\n",[50,42621,42622,42626,42631,42636,42641,42645],{"__ignoreMap":48},[53,42623,42624],{"class":55,"line":56},[53,42625,36877],{},[53,42627,42628],{"class":55,"line":86},[53,42629,42630],{}," \u003CgroupId>org.seleniumhq.selenium\u003C/groupId>\n",[53,42632,42633],{"class":55,"line":126},[53,42634,42635],{}," \u003CartifactId>selenium-htmlunit-driver\u003C/artifactId>\n",[53,42637,42638],{"class":55,"line":163},[53,42639,42640],{}," \u003Cversion>2.3.1\u003C/version>\n",[53,42642,42643],{"class":55,"line":186},[53,42644,35885],{},[53,42646,42647],{"class":55,"line":221},[53,42648,36897],{},[18,42650,42651],{},"A simple webtest that just calls a page and checks for a certain html element might look like this:",[43,42653,42655],{"className":288,"code":42654,"language":290,"meta":48,"style":48},"import org.junit.Test;\nimport org.openqa.selenium.By;\nimport org.openqa.selenium.NoSuchElementException;\nimport org.openqa.selenium.WebDriver;\nimport org.openqa.selenium.WebElement;\nimport org.openqa.selenium.htmlunit.HtmlUnitDriver;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.fail;\n/**\n * Simple web test that just queries the login page through the controller.\n * @author Florian Hopf, Synyx GmbH & Co. KG, hopf@synyx.de\n */\npublic class LoginPageWebtest {\n @Test\n public void testPage() {\n WebDriver driver = new HtmlUnitDriver();\n driver.get(\"http://localhost:8081/url/that/redirects/to/login/\");\n try {\n // Find the text input element by its name\n WebElement element = driver.findElement(By.name(\"username\"));\n assertNotNull(element);\n } catch (NoSuchElementException ex) {\n fail(\"Startup of context failed. 
See console output for more information, : \" + ex.getMessage());\n }\n //Close the browser\n driver.quit();\n }\n}\n",[50,42656,42657,42662,42667,42672,42677,42682,42687,42692,42697,42702,42707,42712,42717,42722,42726,42731,42736,42741,42745,42750,42755,42760,42765,42770,42774,42779,42784,42788],{"__ignoreMap":48},[53,42658,42659],{"class":55,"line":56},[53,42660,42661],{},"import org.junit.Test;\n",[53,42663,42664],{"class":55,"line":86},[53,42665,42666],{},"import org.openqa.selenium.By;\n",[53,42668,42669],{"class":55,"line":126},[53,42670,42671],{},"import org.openqa.selenium.NoSuchElementException;\n",[53,42673,42674],{"class":55,"line":163},[53,42675,42676],{},"import org.openqa.selenium.WebDriver;\n",[53,42678,42679],{"class":55,"line":186},[53,42680,42681],{},"import org.openqa.selenium.WebElement;\n",[53,42683,42684],{"class":55,"line":221},[53,42685,42686],{},"import org.openqa.selenium.htmlunit.HtmlUnitDriver;\n",[53,42688,42689],{"class":55,"line":242},[53,42690,42691],{},"import static org.junit.Assert.assertNotNull;\n",[53,42693,42694],{"class":55,"line":273},[53,42695,42696],{},"import static org.junit.Assert.fail;\n",[53,42698,42699],{"class":55,"line":279},[53,42700,42701],{},"/**\n",[53,42703,42704],{"class":55,"line":496},[53,42705,42706],{}," * Simple web test that just queries the login page through the controller.\n",[53,42708,42709],{"class":55,"line":503},[53,42710,42711],{}," * @author Florian Hopf, Synyx GmbH & Co. KG, hopf@synyx.de\n",[53,42713,42714],{"class":55,"line":509},[53,42715,42716],{}," */\n",[53,42718,42719],{"class":55,"line":515},[53,42720,42721],{},"public class LoginPageWebtest {\n",[53,42723,42724],{"class":55,"line":521},[53,42725,928],{},[53,42727,42728],{"class":55,"line":527},[53,42729,42730],{}," public void testPage() {\n",[53,42732,42733],{"class":55,"line":533},[53,42734,42735],{}," WebDriver driver = new HtmlUnitDriver();\n",[53,42737,42738],{"class":55,"line":539},[53,42739,42740],{}," driver.get(\"http://localhost:8081/url/that/redirects/to/login/\");\n",[53,42742,42743],{"class":55,"line":545},[53,42744,42293],{},[53,42746,42747],{"class":55,"line":2414},[53,42748,42749],{}," // Find the text input element by its name\n",[53,42751,42752],{"class":55,"line":2426},[53,42753,42754],{}," WebElement element = driver.findElement(By.name(\"username\"));\n",[53,42756,42757],{"class":55,"line":2438},[53,42758,42759],{}," assertNotNull(element);\n",[53,42761,42762],{"class":55,"line":2451},[53,42763,42764],{}," } catch (NoSuchElementException ex) {\n",[53,42766,42767],{"class":55,"line":2459},[53,42768,42769],{}," fail(\"Startup of context failed. 
See console output for more information, : \" + ex.getMessage());\n",[53,42771,42772],{"class":55,"line":2470},[53,42773,3242],{},[53,42775,42776],{"class":55,"line":2476},[53,42777,42778],{}," //Close the browser\n",[53,42780,42781],{"class":55,"line":2484},[53,42782,42783],{}," driver.quit();\n",[53,42785,42786],{"class":55,"line":2490},[53,42787,860],{},[53,42789,42790],{"class":55,"line":2495},[53,42791,282],{},[18,42793,42794],{},"Execute the Webtest classes in your profile (we are using a naming convention to distinguish web tests from normal unit\ntests):",[43,42796,42798],{"className":3792,"code":42797,"language":3794,"meta":48,"style":48},"\u003Cplugin>\n \u003CgroupId>org.apache.maven.plugins\u003C/groupId>\n \u003CartifactId>maven-surefire-plugin\u003C/artifactId>\n \u003Cexecutions>\n \u003Cexecution>\n \u003Cid>run-webtests\u003C/id>\n \u003Cphase>integration-test\u003C/phase>\n \u003Cgoals>\n \u003Cgoal>test\u003C/goal>\n \u003C/goals>\n \u003Cconfiguration>\n \u003Cincludes>\n \u003Cinclude>**/*Webtest.java\u003C/include>\n \u003C/includes>\n \u003Cskip>false\u003C/skip>\n \u003C/configuration>\n \u003C/execution>\n \u003C/executions>\n\u003C/plugin>\n",[50,42799,42800,42804,42809,42814,42818,42822,42827,42832,42837,42842,42847,42852,42856,42861,42865,42870,42875,42879,42883],{"__ignoreMap":48},[53,42801,42802],{"class":55,"line":56},[53,42803,22511],{},[53,42805,42806],{"class":55,"line":86},[53,42807,42808],{}," \u003CgroupId>org.apache.maven.plugins\u003C/groupId>\n",[53,42810,42811],{"class":55,"line":126},[53,42812,42813],{}," \u003CartifactId>maven-surefire-plugin\u003C/artifactId>\n",[53,42815,42816],{"class":55,"line":163},[53,42817,38120],{},[53,42819,42820],{"class":55,"line":186},[53,42821,22556],{},[53,42823,42824],{"class":55,"line":221},[53,42825,42826],{}," \u003Cid>run-webtests\u003C/id>\n",[53,42828,42829],{"class":55,"line":242},[53,42830,42831],{}," \u003Cphase>integration-test\u003C/phase>\n",[53,42833,42834],{"class":55,"line":273},[53,42835,42836],{}," \u003Cgoals>\n",[53,42838,42839],{"class":55,"line":279},[53,42840,42841],{}," \u003Cgoal>test\u003C/goal>\n",[53,42843,42844],{"class":55,"line":496},[53,42845,42846],{}," \u003C/goals>\n",[53,42848,42849],{"class":55,"line":503},[53,42850,42851],{}," \u003Cconfiguration>\n",[53,42853,42854],{"class":55,"line":509},[53,42855,37030],{},[53,42857,42858],{"class":55,"line":515},[53,42859,42860],{}," \u003Cinclude>**/*Webtest.java\u003C/include>\n",[53,42862,42863],{"class":55,"line":521},[53,42864,37040],{},[53,42866,42867],{"class":55,"line":527},[53,42868,42869],{}," \u003Cskip>false\u003C/skip>\n",[53,42871,42872],{"class":55,"line":533},[53,42873,42874],{}," \u003C/configuration>\n",[53,42876,42877],{"class":55,"line":539},[53,42878,22581],{},[53,42880,42881],{"class":55,"line":545},[53,42882,38153],{},[53,42884,42885],{"class":55,"line":2414},[53,42886,22591],{},[18,42888,42889],{},"That’s all. In case there is an error in your configuration you will be notified by your CI server that the webtest\nfailed.",[607,42891,989],{},{"title":48,"searchDepth":86,"depth":86,"links":42893},[],[613],"2011-10-08T12:31:45","Modern web applications often consist of quite some configuration files that should at least be tested for validity.\\nThink\\nof Spring controller configurations,\\nweb application descriptors and the like that can’t be tested easily using Unit Tests. 
Fortunately it’s quite easy to\\nstart a tomcat instance on your CI system (Jenkins or Hudson) using\\nthe Tomcat Maven Plugin.","https://synyx.de/blog/testing-webapp-startup-on-jenkins-with-maven-tomcat-and-web-driver/",{},"/blog/testing-webapp-startup-on-jenkins-with-maven-tomcat-and-web-driver",{"title":42562,"description":42901},"Modern web applications often consist of quite some configuration files that should at least be tested for validity.\nThink\nof Spring controller configurations,\nweb application descriptors and the like that can’t be tested easily using Unit Tests. Fortunately it’s quite easy to\nstart a tomcat instance on your CI system (Jenkins or Hudson) using\nthe Tomcat Maven Plugin.","testing-webapp-startup-on-jenkins-with-maven-tomcat-and-web-driver","blog/testing-webapp-startup-on-jenkins-with-maven-tomcat-and-web-driver",[21727,10891,21474,27672,35599],"Modern web applications often consist of quite some configuration files that should at least be tested for validity. Think of Spring controller configurations, web application descriptors and the like that…","FKN3Kcm_JeX78XwCXPVYrt9BAukKJKmqYA8c8wOwG1I",{"id":42908,"title":42909,"author":42910,"body":42911,"category":43011,"date":43012,"description":43013,"extension":617,"link":43014,"meta":43015,"navigation":499,"path":43016,"seo":43017,"slug":42915,"stem":43018,"tags":43019,"teaser":43025,"__hash__":43026},"blog/blog/number-formats-and-jdbc-voodoo.md","Number formats and JDBC voodoo",[7619],{"type":11,"value":42912,"toc":43009},[42913,42916,42919,42922,42931,42934,42937,42946,42957,42975,42982,42991,42998,43007],[14,42914,42909],{"id":42915},"number-formats-and-jdbc-voodoo",[18,42917,42918],{},"Ever had to insert some numeric values into an Oracle database? From your application through JDBC? You think “this is\nan everyday task – what should go wrong?” – well just read on…",[18,42920,42921],{},"Imagine you have got a simple table that has some numeric values that you want to update. Normally the easiest way to do\nthis is to perform a simple update statement:",[43,42923,42925],{"className":41981,"code":42924,"language":41983,"meta":48,"style":48},"update distances set distance = 12.250 where id = 1;\n",[50,42926,42927],{"__ignoreMap":48},[53,42928,42929],{"class":55,"line":56},[53,42930,42924],{},[18,42932,42933],{},"And guess what happens? The row with the ID 1 is updated to the value 12.250.",[18,42935,42936],{},"Now we have got an application that has build its own OR-mapper. As it acts in a very generic way, it does not care\nwhich data is set on a certain field. So it treats numbers the same way as strings, which results in the following\nstatement:",[43,42938,42940],{"className":41981,"code":42939,"language":41983,"meta":48,"style":48},"update distances set distance = '12.250' where id = 1;\n",[50,42941,42942],{"__ignoreMap":48},[53,42943,42944],{"class":55,"line":56},[53,42945,42939],{},[18,42947,42948,42949,42952,42953,42956],{},"No major problem one may think – oracle will just implicitly call the ",[50,42950,42951],{},"TO_NUMBER()"," function and everything should work\nlike a charm. Should – but does not. At least not always. Sometimes it fails with ",[50,42954,42955],{},"ORA-01722: invalid number",". But why?",[18,42958,42959,42960,42962,42963,42966,42967,42970,42971,42974],{},"Let’s take a close look at the ",[50,42961,42951],{}," function. The function can be configured through the (optional)\n",[50,42964,42965],{},"nlsparams",", where one is ",[50,42968,42969],{},"NLS_NUMERIC_CHARACTERS=''dg''",". 
‘d’ tells the function which character is used as the decimal\nseparator, ‘g’ the group separator. If the conversion is done implicit it is not possible to set those parameters\ndirectly, instead those that are set through the environment variable ",[50,42972,42973],{},"NLS_NUMERIC_CHARACTERS"," is used. If this variable\nis not set either, the oracle default value is used, which is ‘.,’ (UK format).",[18,42976,42977,42978,42981],{},"On our setup no value was set, so one might expect that the above statement should work. It did, as long as it was\nexecuted on the database server. If failed with ",[50,42979,42980],{},"ORA-01722"," in case it was run from a remote database tool or on an\napplication server. More strangely it succeeded if it was run in the following way (but only from remote, it failed if\nrun from the server):",[43,42983,42985],{"className":41981,"code":42984,"language":41983,"meta":48,"style":48},"update distances set distance = '12,250' where id = 1;\n",[50,42986,42987],{"__ignoreMap":48},[53,42988,42989],{"class":55,"line":56},[53,42990,42984],{},[18,42992,42993,42994,42997],{},"So the only difference was the JDBC driver that is in-between. And there we have got the problem: The JDBC driver tries\nto do some “intelligent” conversion of your SQL statements. For that purpose it uses the system property\n",[50,42995,42996],{},"user.language",". If this property is not set explicitly, is is set through the locale of the system it is running on.\nThis is no problem as long as the application runs on a system that has a locale set to en_*. But as soon as it is run\non a system that provides a locale with a different number format (like de_DE), the JDBC driver tries to convert the\nnumeric values and the statement fails on the database.",[18,42999,43000,43001,43003,43004,43006],{},"So if your application connects your database through JDBC and you rely on the implicit ",[50,43002,42951],{}," conversion of\nOracle, make sure the system property ",[50,43005,42996],{}," is set to the correct value!",[607,43008,989],{},{"title":48,"searchDepth":86,"depth":86,"links":43010},[],[613],"2011-09-13T12:41:11","Ever had to insert some numeric values into an Oracle database? From your application through JDBC? You think “this is\\nan everyday task – what should go wrong?” – well just read on…","https://synyx.de/blog/number-formats-and-jdbc-voodoo/",{},"/blog/number-formats-and-jdbc-voodoo",{"title":42909,"description":42918},"blog/number-formats-and-jdbc-voodoo",[43020,43021,43022,43023,43024],"jdbc","number","ora-01722","oracle","to_number","Ever had to insert some numeric values into an Oracle database? From your application through JDBC? 
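The safest way around the whole issue is, of course, not to rely on the implicit conversion at all and to bind numeric values as numbers. This is not part of the original post, just a minimal JDBC sketch (reusing the distances table from the examples above) showing how a typed bind parameter sidesteps TO_NUMBER() and the NLS settings entirely:

```java
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class DistanceUpdater {

    /**
     * Updates the distance column with a typed bind parameter. Because the value
     * is transferred as a NUMBER, neither the implicit TO_NUMBER() conversion nor
     * NLS_NUMERIC_CHARACTERS / user.language come into play.
     */
    public void updateDistance(Connection connection, long id, BigDecimal distance) throws SQLException {
        PreparedStatement statement = connection.prepareStatement(
                "update distances set distance = ? where id = ?");
        try {
            statement.setBigDecimal(1, distance);
            statement.setLong(2, id);
            statement.executeUpdate();
        } finally {
            statement.close();
        }
    }
}
```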
You think “this is an everyday task – what should go wrong?” – well…","JDixzOeBinZa4_Xzi6M_R8-dHrKNSkZkOQhmKWX2sp8",{"id":43028,"title":43029,"author":43030,"body":43032,"category":43239,"date":43240,"description":43241,"extension":617,"link":43242,"meta":43243,"navigation":499,"path":43244,"seo":43245,"slug":43036,"stem":43247,"tags":43248,"teaser":43250,"__hash__":43251},"blog/blog/continuous-delivery-or-how-i-learned-to-stop-worrying-and-love-the-pipeline.md","Continuous Delivery or: How I Learned to Stop Worrying and Love the Pipeline",[43031],"speaker-schalanda",{"type":11,"value":43033,"toc":43237},[43034,43037,43078,43084,43087,43096,43099,43102,43124,43127,43134,43137,43165,43179,43188,43191,43207,43212,43218,43229],[14,43035,43029],{"id":43036},"continuous-delivery-or-how-i-learned-to-stop-worrying-and-love-the-pipeline",[18,43038,43039,43040,43043,43044,43049,43050,43055,43056,43061,43062,43067,43068,43061,43073,5881],{},"Following our principle of ",[573,43041,43042],{},"Continuous Skill Enhancement"," here at ",[585,43045,43048],{"href":43046,"rel":43047},"https://synyx.de/leben-und-arbeiten/",[589],"Synyx"," I\nrecently read the\nbook ",[585,43051,43054],{"href":43052,"rel":43053},"http://www.informit.com/store/product.aspx?isbn=0321601912",[589],"Continuous Delivery: Reliable Software Releases through Build, Test, and Deployment Automation","\nby ",[585,43057,43060],{"href":43058,"rel":43059},"http://jezhumble.net/",[589],"Jez Humble"," (from ",[585,43063,43066],{"href":43064,"rel":43065},"http://www.thoughtworks.com/",[589],"ThoughtWorks",")\nand ",[585,43069,43072],{"href":43070,"rel":43071},"http://www.davefarley.net/",[589],"David Farley",[585,43074,43077],{"href":43075,"rel":43076},"http://www.lmaxtrader.co.uk/",[589],"LMAX",[18,43079,43080],{},[2223,43081],{"alt":43082,"src":43083},"\"Continuous Delivery\"","https://media.synyx.de/uploads//2011/08/continuous_delivery_cover.jpeg",[18,43085,43086],{},"The book consists of three distinct parts.",[18,43088,43089,43090,43095],{},"Part one provides a high-level overview about the basics of software delivery. The authors are touching topics such as\nconfiguration management, continuous integration and software testing, describing what they are good for and what the\nchallenges are when implementing them. While the chapters help to understand the terminology used throughout the book\nthey don’t (and cannot) describe each of the topics in great detail – there\nare ",[585,43091,43094],{"href":43092,"rel":43093},"http://www.informit.com/store/product.aspx?isbn=0321336380",[589],"other books"," for that. But of course you’re already\nfamiliar with these topics so it’s just a little refresher.",[18,43097,43098],{},"Part two is dedicated to the central concept described in Continuous Delivery: the deployment pipeline. The idea is to\nreceive immediate feedback on errors and regressions as early in the development lifecycle of a project as possible and\nto provide a working application to the users as early and often as possible.",[18,43100,43101],{},"This means that every commit by a developer triggers a run of the deployment pipeline. It starts with building the\nartifact (obviously), proceeds to the first test stage running unit tests, from which it continues to the integration\ntest phase. If all tests ran successfully the artifact will continue through the stages of the deployment pipeline, e.\ng. a smoke test or non-functional test stage (think security and performance tests) and to a UAT (user acceptance\ntesting) stage. 
Finally the artifact will end up in the staging environment and from there it should require only the\nclick of a button to deploy it to production. Of course the authors describe each step in great detail and have some\nanecdotes from their projects to lighten up the text.",[18,43103,43104,43105,43109,43110,43113,43114,11792,43119,43123],{},"The central theme of part three is managing different parts of the delivery ecosystem. The authors discuss pros and cons\nof physical servers, virtualized servers and cloud computing, introduce the reader to the concepts of automatic machine\nprovisioning and configuration management with ",[585,43106,34285],{"href":43107,"rel":43108},"http://www.puppetlabs.com/",[589],", monitoring your systems and\ncollecting logs and performance data. They talk about managing test data, how to version it and how to get a basic stock\nof data for running integration tests in the first place. One chapter is dedicated to the challenges of managing\ncomponents and dependencies in which the authors discuss different strategies of versioning the components of your\napplication. It even comprises a short introduction to ",[585,43111,41686],{"href":24570,"rel":43112},[589],". In the following\nchapter the authors give an introduction to different revision control systems\nlike ",[585,43115,43118],{"href":43116,"rel":43117},"http://subversion.apache.org/",[589],"Subversion",[585,43120,7939],{"href":43121,"rel":43122},"http://gitscm.com/",[589],", as well as commercial alternatives like\nBitKeeper and ClearCase, and their respective advantages and disadvantages over the free alternatives. They continue to\ndescribe several advanced branching and integration strategies, each with its very own advantages and disadvantages in\ndifferent situations.",[18,43125,43126],{},"The last chapter swiftly copes with rather non-technical questions like the project lifecycle risk management and how\ncompliance and auditing are handled in a project using continuous delivery.",[18,43128,43129,43130,43133],{},"The concepts detailed in ",[573,43131,43132],{},"Continuous Delivery"," are not new per se but it’s the first book I read that really brought\nthese together in one coherent narrative. In fact, most of the concepts will seem to be obvious once you’ve read and\ngrokked them, but somehow nobody ever thought about them in depth.",[18,43135,43136],{},"Some of the distilled concepts are:",[577,43138,43139,43146,43149,43152,43159],{},[580,43140,43141,43142,43145],{},"Build binaries exactly ",[27,43143,43144],{},"once",", store them in your artifact repository and promote them through the complete\ndeployment pipeline.",[580,43147,43148],{},"Only promote builds into staging or production that pass all unit and acceptance tests.",[580,43150,43151],{},"The development, testing, UAT and staging environments should be as similar as possible to the production environment.",[580,43153,43154,43155,43158],{},"Automate ",[27,43156,43157],{},"everything",": builds, configuration, tests. Human interaction is prone to error, try to avoid it wherever\npossible.",[580,43160,43161,43162],{},"Use version control for ",[27,43163,43164],{},"everything, including the configuration of underlying operating systems and infrastructure\nsuch as networking equipment.",[18,43166,43167,43169,43170,43175,43176,986],{},[573,43168,43132],{}," has rightfully received much praise around the Internet and especially in the recently popularized\nDevOps movement. 
In 2011, the authors also won a ",[585,43171,43174],{"href":43172,"rel":43173},"http://drdobbs.com/joltawards/231500080?pgno=7",[589],"Jolt Excellence Award","\nin the category ",[573,43177,43178],{},"The Best Books",[18,43180,43181,43182,43187],{},"One thing I didn’t like about the book is the way online sources have been referenced in the text. Whenever the authors\nreference a website they provide an alphanumeric shortcode you know from URL shorteners like TinyURL. In fact that’s\nexactly what they are. These shortcodes can be used with Bit.ly or, as a fallback, directly from\nthe ",[585,43183,43186],{"href":43184,"rel":43185},"http://continuousdelivery.com/",[589],"supporting website"," of the book.",[18,43189,43190],{},"Example:",[577,43192,43193,43200],{},[580,43194,43195],{},[585,43196,43199],{"href":43197,"rel":43198},"http://bit.ly/bibNp0",[589],"bit.ly/bibNp0",[580,43201,43202],{},[585,43203,43206],{"href":43204,"rel":43205},"http://continuousdelivery.com/go/bibNp0",[589],"continuousdelivery.com/go/bibNp0",[18,43208,43209,43210,986],{},"This often interrupts the flow of reading. Instead a more traditional style, e. g. placing the shortcodes in footnotes,\nwould have been preferable in the printed version of the book. I also missed a list of all referenced online sources,\neither at the end of each chapter or in a separate appendix. Fortunately this is really the only criticism I have for\n",[573,43211,43132],{},[18,43213,43214,43215,43217],{},"As a conclusion, I can really recommend reading ",[573,43216,43132],{}," to anyone involved in developing and delivering\nsoftware. It will provide some new points of view on your work and give you some new ideas about how to improve your\ncurrent processes. I, for one, am looking forward to applying the principles outlined in this book to some of our\nprojects.",[18,43219,43220,43221,2246,43224,986],{},"If you’re hooked now you might want to read the sample chapter from ",[573,43222,43223],{},"Continuous\nDelivery",[585,43225,43228],{"href":43226,"rel":43227},"http://www.informit.com/content/images/9780321601919/samplepages/0321601912.pdf",[589],"Chapter 5 – Anatomy of the Deployment Pipeline",[18,43230,43231,43232],{},"Oh, and by the way: ",[585,43233,43236],{"href":43234,"rel":43235},"https://jobs.synyx.de/",[589],"We’re hiring!",{"title":48,"searchDepth":86,"depth":86,"links":43238},[],[613],"2011-08-23T12:00:36","Following our principle of Continuous Skill Enhancement here at Synyx I\\nrecently read the\\nbook Continuous Delivery: Reliable Software Releases through Build, Test, and Deployment Automation\\nby Jez Humble (from ThoughtWorks)\\nand David Farley (from LMAX).","https://synyx.de/blog/continuous-delivery-or-how-i-learned-to-stop-worrying-and-love-the-pipeline/",{},"/blog/continuous-delivery-or-how-i-learned-to-stop-worrying-and-love-the-pipeline",{"title":43029,"description":43246},"Following our principle of Continuous Skill Enhancement here at Synyx I\nrecently read the\nbook Continuous Delivery: Reliable Software Releases through Build, Test, and Deployment Automation\nby Jez Humble (from ThoughtWorks)\nand David Farley (from LMAX).","blog/continuous-delivery-or-how-i-learned-to-stop-worrying-and-love-the-pipeline",[43249,32780,32781,21727,18709,6884],"book-review","Following our principle of Continuous Skill Enhancement here at Synyx I recently read the book Continuous Delivery: Reliable Software Releases through Build, Test, and Deployment Automation by Jez Humble 
# Finally working with professionals?

![Java Entwickler wanted](https://media.synyx.de/uploads//2011/07/java_entwickler_wanted.jpg)

**We are looking for reinforcements for our custom software development team, starting immediately!**

Interesting projects, a friendly working atmosphere and everything else you need.

Take a look, no matter whether you lean towards pure development, software architecture or communication genius. [More info](https://jobs.synyx.de/ "Java Entwickler wanted!")

# The Tale of JBoss and the 7 Little Logging Frameworks

At Synyx we're currently taking care of a rather large legacy project for one of our customers in the course of our [Code Clinic](https://synyx.de/code-clinic-softwareoptimierung/) services. The project comprises several components such as a fat client implemented with a custom UI framework on top of Swing, a bulky web application using a mixture of custom and obsolete frameworks, and a lot of asynchronously running jobs that process input from other systems, involving custom XSL transformations and a heap of stored procedures in an Oracle 9i database.
You get the picture: it's the prototype of a legacy system.

![7 Little Logging Frameworks](https://media.synyx.de/uploads//2011/06/7dwarves.jpg)

7 Little Logging Frameworks on their way into your code base

The original developers of the system suffered a serious case of the well-known [NIH syndrome](http://en.wikipedia.org/wiki/Not_Invented_Here) and thus a lot of [technical debt](http://www.martinfowler.com/bliki/TechnicalDebt.html) has been piled up over the course of its development.

One peculiar case was the use of about 7 different logging abstractions scattered over the whole code base. While some (well, just one) of the implementations provided a certain added value, the other ones were just plain wrappers around the eventually used logging frameworks. They literally just added a bad API on top of another one. There were also at least three different logging frameworks in use, namely [Apache Commons Logging](http://commons.apache.org/logging/), [SLF4J](http://www.slf4j.org/) and [Log4j](http://logging.apache.org/log4j/).

In order to consolidate the code base, reduce the dependencies on external frameworks and prevent conflicts induced by the use of generic class names like Log or Logger, we decided to clean up this mess and use SLF4J with its Log4j backend as our authoritative logging framework in this project. We chose SLF4J for several (hopefully good) reasons, e.g. the low number of dependencies, the ability to plug in the logging framework of choice at deployment time, the clean API and the good support for legacy logging frameworks. Also read the articles [My Soapbox for SLF4J](http://bsnyderblog.blogspot.com/2007/08/my-soapbox-for-slf4j.html) and [Thoughts on Java logging and SLF4J](http://blog.frankel.ch/thoughts-on-java-logging-and-slf4j) for a more detailed discussion of SLF4J's features. Did I mention SLF4J also [works on Android](http://www.slf4j.org/android/)?

SLF4J thankfully makes it easy to [bridge legacy logging frameworks](http://www.slf4j.org/legacy.html) so that you don't have to refactor all your code at once but can incrementally migrate your code base while still using SLF4J under the hood.

As you might have already guessed from the title of this blog post, the project uses the established [JBoss](http://www.jboss.org/) Application Server. Unfortunately it's currently stuck at version 4.0.3.SP1, but that's another topic.

After deciding which framework to use, we started refactoring our code base. At first this wasn't a problem. Deleting the legacy logging classes and then replacing their calls with our authoritative SLF4J [Logger](http://www.slf4j.org/apidocs/org/slf4j/Logger.html) was straightforward. Since the logging classes were scattered across the whole code base we sometimes missed a dependency in another component of the project, but that's what a Continuous Integration system is for, after all.

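Just as an illustration (the class and the messages below are made up; only the SLF4J API itself is real), this is roughly what the consolidated logging code looks like after such a migration: one logger per class obtained through the facade, and parameterized messages instead of string concatenation.

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical class, for illustration only: the target state after the migration.
public class OrderImportJob {

    // one static logger per class, obtained via the SLF4J facade
    private static final Logger LOG = LoggerFactory.getLogger(OrderImportJob.class);

    public void importOrder(String orderId) {
        // parameterized messages avoid string concatenation when the level is disabled
        LOG.debug("importing order {}", orderId);
        try {
            process(orderId);
        } catch (RuntimeException e) {
            // a Throwable as last argument is logged together with its stack trace
            LOG.error("could not import order {}", orderId, e);
            throw e;
        }
    }

    private void process(String orderId) {
        // placeholder for the real import logic
    }
}
```
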
![SLF4J Logo](http://www.slf4j.org/)

Simple Logging Facade for Java

We also adapted the dependencies in the components' POM files to use SLF4J's commons-logging bridge instead of commons-logging itself. Pro tip from the trenches: don't confuse jcl-over-slf4j.jar with slf4j-jcl.jar! The former is the API bridge we wanted to use, the latter is an SLF4J backend implementation using commons-logging. In order to get the dependencies straight we used Maven's [dependency management](http://maven.apache.org/guides/introduction/introduction-to-dependency-mechanism.html#Dependency_Management) feature and added the following declarations to the parent POM for the project's components:

```xml
<dependencyManagement>
  <dependencies>
    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
      <version>1.1.1</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>jcl-over-slf4j</artifactId>
      <version>1.6.1</version>
    </dependency>
  </dependencies>
</dependencyManagement>
```

By setting the [scope](http://maven.apache.org/guides/introduction/introduction-to-dependency-mechanism.html#Dependency_Scope) of commons-logging to "provided" we made sure that it would not be pulled in accidentally.
Unfortunately this scope is not transitive and we had to explicitly exclude commons-logging (and other artifacts) from some of our dependencies:

```xml
<dependency>
  <groupId>org.apache.ws.commons.axiom</groupId>
  <artifactId>axiom-api</artifactId>
  <version>1.2</version>
  <exclusions>
    <exclusion>
      <artifactId>commons-logging</artifactId>
      <groupId>commons-logging</groupId>
    </exclusion>
  </exclusions>
</dependency>
```

In order to analyze the dependency trees of our components, the [Maven dependency plugin](http://maven.apache.org/plugins/maven-dependency-plugin/) turned out to be very useful, especially the [dependency:tree](http://maven.apache.org/plugins/maven-dependency-plugin/tree-mojo.html) mojo which will produce a view of the project's dependency tree including transitive dependencies. Of course a good IDE will also support you with graphical views of the dependency tree. Here's the output of `mvn dependency:tree` for the aforementioned Swing application:

Here’s the output of mvn dependency:tree for the aforementioned\nSwing application:",[43,43568,43570],{"className":30754,"code":43569,"language":30756,"meta":48,"style":48},"[INFO] [dependency:tree {execution: default-cli}]\n[INFO] com.example:swing-client:jar:1.1.0-SNAPSHOT\n[INFO] +- org.slf4j:slf4j-log4j12:jar:1.6.1:compile\n[INFO] | \\- log4j:log4j:jar:1.2.16:compile\n[INFO] +- jgoodies:plastic:jar:1.2.0:compile\n[INFO] +- com.toedter:jcalendar:jar:1.3.2:compile\n[INFO] +- com.example:jms:jar:1.0.1-SNAPSHOT:compile\n[INFO] | +- jboss:jboss-system:jar:4.0.2:compile\n[INFO] | +- jms:jms:jar:1.0.2:compile\n[INFO] | +- org.quartz-scheduler:quartz:jar:1.2.3:compile\n[INFO] | +- jboss:jbossmq-client:jar:4.0.2:compile\n[INFO] | +- jboss:jboss-jmx:jar:4.0.2:compile\n[INFO] | \\- javax.ejb:ejb:jar:2.1:compile\n[INFO] +- flex:flex-messaging-common:jar:1.0.0:compile\n[INFO] +- flex:flex-messaging-core:jar:1.0.0:compile\n[INFO] +- flex:flex-messaging-opt:jar:1.0.0:compile\n[INFO] +- flex:flex-messaging-proxy:jar:1.0.0:compile\n[INFO] +- flex:flex-messaging-remoting:jar:1.0.0:compile\n[INFO] +- poi:poi-2.5.1-final:jar:20040804:compile\n[INFO] +- com.example:docengine:jar:1.0.0-SNAPSHOT:compile\n[INFO] | +- com.example:webfw:jar:1.0.2-SNAPSHOT:compile\n[INFO] | | +- com.example:dms_client:jar:1.0.2:compile\n[INFO] | | | \\- org.apache.axis2:axis2:jar:1.1.1:compile\n[INFO] | | | +- org.apache.ws.commons.axiom:axiom-dom:jar:1.2:compile\n[INFO] | | | +- org.apache.ws.commons.axiom:axiom-impl:jar:1.2:compile\n[INFO] | | | +- ant:ant:jar:1.6.5:compile\n[INFO] | | | +- woodstox:wstx-asl:jar:3.0.1:compile\n[INFO] | | | +- org.apache.ws.commons.schema:XmlSchema:jar:1.2:compile\n[INFO] | | | +- annogen:annogen:jar:0.1.0:compile\n[INFO] | | | +- commons-httpclient:commons-httpclient:jar:3.0.1:compile\n[INFO] | | | | \\- commons-codec:commons-codec:jar:1.2:compile\n[INFO] | | | +- org.apache.httpcomponents:jakarta-httpcore:jar:4.0-alpha2:compile\n[INFO] | | | +- wsdl4j:wsdl4j:jar:1.6.1:compile\n[INFO] | | | +- backport-util-concurrent:backport-util-concurrent:jar:2.2:compile\n[INFO] | | | +- org.apache.ws.commons.neethi:neethi:jar:2.0:compile\n[INFO] | | | \\- org.apache.woden:woden-impl-om:jar:1.0M8:compile\n[INFO] | | | +- org.apache.woden:woden-api:jar:1.0M8:compile\n[INFO] | | | +- org.apache.ant:ant:jar:1.7.0:compile\n[INFO] | | | | \\- org.apache.ant:ant-launcher:jar:1.7.0:compile\n[INFO] | | | +- xerces:xmlParserAPIs:jar:2.6.0:compile\n[INFO] | | | \\- org.codehaus.woodstox:wstx-asl:jar:3.2.4:runtime\n[INFO] | | +- org.apache.ws.commons.axiom:axiom-api:jar:1.2:compile\n[INFO] | | | +- jaxen:jaxen:jar:1.1-beta-9:compile\n[INFO] | | | +- xml-apis:xml-apis:jar:1.3.03:compile\n[INFO] | | | \\- stax:stax-api:jar:1.0.1:compile\n[INFO] | | +- org.slf4j:jcl-over-slf4j:jar:1.6.1:compile\n[INFO] | | +- org.apache.struts:struts-core:jar:1.3.8:compile\n[INFO] | | | \\- commons-chain:commons-chain:jar:1.1:compile\n[INFO] | | +- com.sun.xml:xml:jar:0.8.0:compile\n[INFO] | | +- xmlc:xmlc-all-runtime:jar:2.2.8.1:compile\n[INFO] | | +- xalan:xalan:jar:2.7.1:compile\n[INFO] | | | \\- xalan:serializer:jar:2.7.1:compile\n[INFO] | | \\- com.lowagie:itext:jar:2.0.7:compile\n[INFO] | | +- bouncycastle:bcmail-jdk14:jar:138:compile\n[INFO] | | \\- bouncycastle:bcprov-jdk14:jar:138:compile\n[INFO] | +- org.apache.xmlgraphics:fop:jar:0.95-1:compile\n[INFO] | | +- org.apache.xmlgraphics:xmlgraphics-commons:jar:1.3.1:compile\n[INFO] | | +- org.apache.xmlgraphics:batik-svg-dom:jar:1.7:compile\n[INFO] | | | +- 
org.apache.xmlgraphics:batik-anim:jar:1.7:compile\n[INFO] | | | +- org.apache.xmlgraphics:batik-css:jar:1.7:compile\n[INFO] | | | +- org.apache.xmlgraphics:batik-dom:jar:1.7:compile\n[INFO] | | | +- org.apache.xmlgraphics:batik-parser:jar:1.7:compile\n[INFO] | | | +- org.apache.xmlgraphics:batik-util:jar:1.7:compile\n[INFO] | | | \\- xml-apis:xml-apis-ext:jar:1.3.04:compile\n[INFO] | | +- org.apache.xmlgraphics:batik-bridge:jar:1.7:compile\n[INFO] | | | +- org.apache.xmlgraphics:batik-script:jar:1.7:compile\n[INFO] | | | \\- org.apache.xmlgraphics:batik-xml:jar:1.7:compile\n[INFO] | | +- org.apache.xmlgraphics:batik-awt-util:jar:1.7:compile\n[INFO] | | +- org.apache.xmlgraphics:batik-gvt:jar:1.7:compile\n[INFO] | | +- org.apache.xmlgraphics:batik-transcoder:jar:1.7:compile\n[INFO] | | | \\- org.apache.xmlgraphics:batik-svggen:jar:1.7:compile\n[INFO] | | +- org.apache.xmlgraphics:batik-extension:jar:1.7:compile\n[INFO] | | +- org.apache.xmlgraphics:batik-ext:jar:1.7:compile\n[INFO] | | +- commons-io:commons-io:jar:1.3.1:compile\n[INFO] | | \\- org.apache.avalon.framework:avalon-framework-impl:jar:4.3.1:compile\n[INFO] | +- org.apache.avalon.framework:avalon-framework-api:jar:4.3.1:compile\n[INFO] | +- poi:poi:jar:2.5.1-final-20040804:compile\n[INFO] | +- javax.servlet:servlet-api:jar:2.5:compile\n[INFO] | +- javax.mail:mail:jar:1.4.1:compile\n[INFO] | | \\- javax.activation:activation:jar:1.1.1:compile\n[INFO] | +- org.openoffice:jurt:jar:3.2.1:compile\n[INFO] | | \\- org.openoffice:ridl:jar:3.2.1:compile\n[INFO] | +- org.openoffice:unoil:jar:3.1.0:compile\n[INFO] | +- org.openoffice:juh:jar:3.1.0:compile\n[INFO] | \\- ru.novosoft.dc:rtf2fo:jar:eval:compile\n[INFO] +- com.jgoodies:forms:jar:1.0.7:compile\n[INFO] +- xerces:xercesImpl:jar:2.4.0:compile\n[INFO] +- com.oracle:ojdbc5:jar:11.1.0.6.0:compile\n[INFO] +- javax.help:javahelp:jar:2.0.02:compile\n[INFO] +- com.example:custom-swing-framework:jar:1.1.2-SNAPSHOT:compile\n[INFO] | +- com.whirlycott:whirlycache:jar:0.7.1:compile\n[INFO] | | +- commons-collections:commons-collections:jar:3.1:compile\n[INFO] | | +- jdom:jdom:jar:1.0:compile\n[INFO] | | \\- concurrent:concurrent:jar:1.3.4:compile\n[INFO] | +- ehcache:ehcache:jar:0.9:compile\n[INFO] | +- org.enhydra.xmlc:xmlc:jar:2.2.7.1:compile\n[INFO] | +- com.jgoodies:looks:jar:2.2.2:compile\n[INFO] | +- org.swinglabs:swingx:jar:1.6.1:compile\n[INFO] | | +- com.jhlabs:filters:jar:2.0.235:compile\n[INFO] | | \\- org.swinglabs:swing-worker:jar:1.1:compile\n[INFO] | \\- struts:struts:jar:1.2.9:compile\n[INFO] | +- commons-beanutils:commons-beanutils:jar:1.7.0:compile\n[INFO] | +- commons-digester:commons-digester:jar:1.6:compile\n[INFO] | +- commons-fileupload:commons-fileupload:jar:1.0:compile\n[INFO] | +- commons-validator:commons-validator:jar:1.1.4:compile\n[INFO] | +- oro:oro:jar:2.0.7:compile\n[INFO] | \\- antlr:antlr:jar:2.7.2:compile\n[INFO] +- junit:junit:jar:4.8.2:test\n[INFO] \\- org.slf4j:slf4j-api:jar:1.6.1:compile\n[INFO] ------------------------------------------------------------------------\n[INFO] BUILD SUCCESSFUL\n[INFO] ------------------------------------------------------------------------\n[INFO] Total time: 2 seconds\n[INFO] Finished at: Tue Jun 28 14:10:29 CEST 2011\n[INFO] Final Memory: 22M/257M\n[INFO] 
------------------------------------------------------------------------\n",[50,43571,43572,43577,43582,43587,43600,43605,43610,43615,43627,43638,43649,43660,43671,43682,43687,43692,43697,43702,43707,43712,43717,43728,43742,43757,43773,43788,43803,43818,43833,43848,43863,43881,43896,43911,43926,43941,43957,43973,43988,44006,44021,44037,44050,44065,44080,44095,44108,44121,44136,44149,44162,44175,44190,44203,44216,44229,44240,44253,44266,44281,44296,44311,44326,44341,44356,44369,44384,44399,44412,44425,44438,44453,44466,44479,44492,44505,44516,44527,44538,44549,44562,44573,44586,44597,44608,44619,44624,44629,44634,44639,44644,44655,44668,44681,44694,44705,44716,44727,44738,44751,44764,44775,44786,44797,44808,44819,44830,44842,44848,44859,44865,44871,44876,44882,44888,44894],{"__ignoreMap":48},[53,43573,43574],{"class":55,"line":56},[53,43575,43576],{"class":82},"[INFO] [dependency:tree {execution: default-cli}]\n",[53,43578,43579],{"class":55,"line":86},[53,43580,43581],{"class":82},"[INFO] com.example:swing-client:jar:1.1.0-SNAPSHOT\n",[53,43583,43584],{"class":55,"line":126},[53,43585,43586],{"class":82},"[INFO] +- org.slf4j:slf4j-log4j12:jar:1.6.1:compile\n",[53,43588,43589,43592,43594,43597],{"class":55,"line":163},[53,43590,43591],{"class":82},"[INFO] ",[53,43593,4459],{"class":389},[53,43595,43596],{"class":59}," \\-",[53,43598,43599],{"class":63}," log4j:log4j:jar:1.2.16:compile\n",[53,43601,43602],{"class":55,"line":186},[53,43603,43604],{"class":82},"[INFO] +- jgoodies:plastic:jar:1.2.0:compile\n",[53,43606,43607],{"class":55,"line":221},[53,43608,43609],{"class":82},"[INFO] +- com.toedter:jcalendar:jar:1.3.2:compile\n",[53,43611,43612],{"class":55,"line":242},[53,43613,43614],{"class":82},"[INFO] +- com.example:jms:jar:1.0.1-SNAPSHOT:compile\n",[53,43616,43617,43619,43621,43624],{"class":55,"line":273},[53,43618,43591],{"class":82},[53,43620,4459],{"class":389},[53,43622,43623],{"class":59}," +-",[53,43625,43626],{"class":63}," jboss:jboss-system:jar:4.0.2:compile\n",[53,43628,43629,43631,43633,43635],{"class":55,"line":279},[53,43630,43591],{"class":82},[53,43632,4459],{"class":389},[53,43634,43623],{"class":59},[53,43636,43637],{"class":63}," jms:jms:jar:1.0.2:compile\n",[53,43639,43640,43642,43644,43646],{"class":55,"line":496},[53,43641,43591],{"class":82},[53,43643,4459],{"class":389},[53,43645,43623],{"class":59},[53,43647,43648],{"class":63}," org.quartz-scheduler:quartz:jar:1.2.3:compile\n",[53,43650,43651,43653,43655,43657],{"class":55,"line":503},[53,43652,43591],{"class":82},[53,43654,4459],{"class":389},[53,43656,43623],{"class":59},[53,43658,43659],{"class":63}," jboss:jbossmq-client:jar:4.0.2:compile\n",[53,43661,43662,43664,43666,43668],{"class":55,"line":509},[53,43663,43591],{"class":82},[53,43665,4459],{"class":389},[53,43667,43623],{"class":59},[53,43669,43670],{"class":63}," jboss:jboss-jmx:jar:4.0.2:compile\n",[53,43672,43673,43675,43677,43679],{"class":55,"line":515},[53,43674,43591],{"class":82},[53,43676,4459],{"class":389},[53,43678,43596],{"class":59},[53,43680,43681],{"class":63}," javax.ejb:ejb:jar:2.1:compile\n",[53,43683,43684],{"class":55,"line":521},[53,43685,43686],{"class":82},"[INFO] +- flex:flex-messaging-common:jar:1.0.0:compile\n",[53,43688,43689],{"class":55,"line":527},[53,43690,43691],{"class":82},"[INFO] +- flex:flex-messaging-core:jar:1.0.0:compile\n",[53,43693,43694],{"class":55,"line":533},[53,43695,43696],{"class":82},"[INFO] +- 
As described before, the elimination of the "rogue" logging classes wasn't a big problem. But we hadn't reached the finish line yet.

The final application is assembled into one big EAR file which contains a lot of [EJBs](http://docs.jboss.org/jbossas/jboss4guide/r4/html/ch5.chapter.html), SAR files ([JBoss Service Archives](http://community.jboss.org/wiki/ServiceArchive)), and three [WARs](http://docs.jboss.org/jbossas/jboss4guide/r4/html/ch9.chapt.html) (web applications). By default JBoss uses its unified class loader (well described in the JBoss Admin Guide in section [2.2.2.4. Inside the JBoss Class Loading Architecture](http://docs.jboss.org/jbossas/jboss4guide/r4/html/ch2.chapter.html#d0e2490)), which is for several reasons not suitable for the architecture of this legacy application (it uses incompatible versions of the same library in different components, for example). Thus we wanted to utilize Apache Tomcat's class loading mechanism, described in the [Apache Tomcat 5.5 Class Loader HOW-TO](http://tomcat.apache.org/tomcat-5.5-doc/class-loader-howto.html), which basically isolates the deployed web applications from each other. The wiki article on [Advanced JBoss Class Loading](http://community.jboss.org/wiki/JBossClassLoadingUseCases) is also very informative in this context.

Since JBoss 4.0.3.SP1 uses Apache Tomcat 5.5 as its servlet container, it provides [a simple configuration setting](http://community.jboss.org/wiki/UseJBossWebLoader) to achieve exactly the behaviour described above. Unfortunately we encountered a lot of class loading problems when we first deployed the application after the log framework refactoring. We checked the dependencies for each component but everything seemed to be correct. So we started doing what we do best: set [logging to DEBUG](http://community.jboss.org/wiki/EnableClassloaderLogging) and engage!

In the end, some knee-deep class loader debugging pointed us in the right direction: it's not advisable to have more than one instance of the SLF4J JARs in your class path. Basically the class loader was complaining that the classes it was trying to load (e.g. [org.slf4j.spi.LoggerFactoryBinder](http://www.slf4j.org/apidocs/org/slf4j/spi/LoggerFactoryBinder.html)) had already been loaded from another repository. If you recall the details of Apache Tomcat's [class loader](http://tomcat.apache.org/tomcat-5.5-doc/class-loader-howto.html#Overview) this makes perfect sense, but we thought it was intelligent enough to actually share these classes. As it turns out there's a (somewhat undocumented) configuration option which will do exactly this for us: **FilteredPackages**.

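One generic trick that helps with this kind of analysis (this is not from the original setup, just a plain-JDK sketch that assumes slf4j-api 1.x is on the class path) is to ask the JVM which class loader defined a suspicious class and which archive the class file was actually read from:

```java
// Plain-JDK sketch: print which class loader defined a class and where its
// .class file comes from. Running this from different modules of the EAR
// quickly shows whether a class is served by more than one repository.
public class ClassLoaderProbe {

    public static void main(String[] args) {
        // requires slf4j-api 1.x on the class path
        Class<?> clazz = org.slf4j.spi.LoggerFactoryBinder.class;

        // null means the bootstrap class loader
        System.out.println("defined by: " + clazz.getClassLoader());

        // physical location (JAR or directory) the class file was loaded from
        String resource = clazz.getName().replace('.', '/') + ".class";
        System.out.println("loaded from: " + clazz.getClassLoader().getResource(resource));
    }
}
```
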
",[585,44960,44963],{"href":44961,"rel":44962},"http://www.slf4j.org/apidocs/org/slf4j/spi/LoggerFactoryBinder.html",[589],"org.slf4j.spi.LoggerFactoryBinder",") had already\nbeen loaded from another repository. If you recall the details of Apache\nTomcat’s ",[585,44966,44969],{"href":44967,"rel":44968},"http://tomcat.apache.org/tomcat-5.5-doc/class-loader-howto.html#Overview",[589],"class loader"," this makes perfect\nsense but we thought that it was intelligent enough to actually share these classes. As it turns out there’s a (somewhat\nundocumented) configuration option which will exactly do this for us: ",[27,44972,44973],{},"FilteredPackages",[18,44975,44976],{},"After adding the SLF4J package prefix to our jbossweb-tomcat.sar/META-INF/jboss-service.xml file as shown below, the\napplication magically started working again.",[43,44978,44980],{"className":3792,"code":44979,"language":3794,"meta":48,"style":48}," \u003C!-- The list of package prefixes that should not be loaded without\n delegating to the parent class loader before trying the web app\n class loader. The packages listed here are those tha are used by\n the web container implementation and cannot be overriden. The format\n is a comma separated list of the package names. There cannot be any\n whitespace between the package prefixes.\n This setting only applies when UseJBossWebLoader=false.\n -->\n \u003Cattribute name=\"FilteredPackages\">javax.servlet,org.slf4j\u003C/attribute>\n",[50,44981,44982,44987,44992,44997,45002,45007,45012,45017,45022],{"__ignoreMap":48},[53,44983,44984],{"class":55,"line":56},[53,44985,44986],{}," \u003C!-- The list of package prefixes that should not be loaded without\n",[53,44988,44989],{"class":55,"line":86},[53,44990,44991],{}," delegating to the parent class loader before trying the web app\n",[53,44993,44994],{"class":55,"line":126},[53,44995,44996],{}," class loader. The packages listed here are those tha are used by\n",[53,44998,44999],{"class":55,"line":163},[53,45000,45001],{}," the web container implementation and cannot be overriden. The format\n",[53,45003,45004],{"class":55,"line":186},[53,45005,45006],{}," is a comma separated list of the package names. There cannot be any\n",[53,45008,45009],{"class":55,"line":221},[53,45010,45011],{}," whitespace between the package prefixes.\n",[53,45013,45014],{"class":55,"line":242},[53,45015,45016],{}," This setting only applies when UseJBossWebLoader=false.\n",[53,45018,45019],{"class":55,"line":273},[53,45020,45021],{}," -->\n",[53,45023,45024],{"class":55,"line":279},[53,45025,45026],{}," \u003Cattribute name=\"FilteredPackages\">javax.servlet,org.slf4j\u003C/attribute>\n",[2207,45028,45030],{"id":45029},"lessons-learned","Lessons learned",[577,45032,45033,45046,45049,45052,45055,45058,45085],{},[580,45034,45035,45036,45039,45040,45045],{},"If you’re starting a new project choose exactly one logging framework and stick with it. I recommend\nusing ",[585,45037,43350],{"href":43348,"rel":45038},[589]," for its stable API and ",[585,45041,45044],{"href":45042,"rel":45043},"http://logback.qos.ch/",[589],"Logback"," as backend at the\nmoment.",[580,45047,45048],{},"Don’t create log wrappers if they provide no additional value and if you really, really need to implement your own,\nuse the logging framework’s API instead of thinking up your own.",[580,45050,45051],{},"Dependencies over a lot of distinct components which should be assembled into a single EAR can be quite hairy. 
- The one configuration option that might solve all of your problems is not documented very well. Scan through your (commented) configuration files once in a while.
- Use the tools your IDE provides. Refactoring is so much easier if you know what your IDE is capable of.
- Never underestimate the grief that class loading issues can cause you. [Really](http://onjava.com/lpt/a/5586), [read](http://www.devx.com/Java/Article/31614/1954?pf=true), [up](http://onjava.com/lpt/a/4337), [on](http://onjava.com/lpt/a/5795), [it](http://www.theserverside.com/news/1364680/Understanding-J2EE-Application-Server-ClassLoading-Architectures).
- Continuous integration the way we do it is nice, but it doesn't help you with class loading issues appearing in your JEE application server.

How about you? Did you experience similar stories with large legacy applications? What did you take away from them? We're thrilled to hear **your** "Development War Stories" in the comments!

## Attributions

- [Seven Dwarves "Homeward Bound" statue by Jim Shore as seen from the China Closet on Main Street](http://www.flickr.com/photos/lorenjavier/3537572809/) by [Loren Javier](http://www.flickr.com/photos/lorenjavier/), [CC BY-ND 2.0](http://creativecommons.org/licenses/by-nd/2.0/)

.sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}",{"title":48,"searchDepth":86,"depth":86,"links":45122},[45123,45124],{"id":45029,"depth":86,"text":45030},{"id":45097,"depth":86,"text":45098},[613],"2011-06-30T09:37:07","At Synyx we’re currently taking care of a rather large legacy project for one of our customers in the course of\\nour Code Clinic services. The project comprises several components\\nsuch as a fat client implemented with a custom UI framework on top of Swing, a bulky web application using a mixture of\\ncustom and obsolete frameworks, and a lot of asynchronously running jobs to process input from other systems involving\\ncustom XSL transformations and a heap of stored procedures in a Oracle 9i database. You get the picture, it’s the\\nprototype of a legacy system.","https://synyx.de/blog/the-tale-of-jboss-and-the-7-little-logging-frameworks/",{},"/blog/the-tale-of-jboss-and-the-7-little-logging-frameworks",{"title":43298,"description":45132},"At Synyx we’re currently taking care of a rather large legacy project for one of our customers in the course of\nour Code Clinic services. The project comprises several components\nsuch as a fat client implemented with a custom UI framework on top of Swing, a bulky web application using a mixture of\ncustom and obsolete frameworks, and a lot of asynchronously running jobs to process input from other systems involving\ncustom XSL transformations and a heap of stored procedures in a Oracle 9i database. You get the picture, it’s the\nprototype of a legacy system.","blog/the-tale-of-jboss-and-the-7-little-logging-frameworks",[41925,45135,45136,290,45137,45138,45139,27672],"class-loader","commons-logging","jboss","log4j","slf4j","At Synyx we’re currently taking care of a rather large legacy project for one of our customers in the course of our Code Clinic services. The project comprises several components…","ppWPHxzoN11zneoOW2aVj0eAXHnxYFD4vHMnjUtyqHs",{"id":45143,"title":45144,"author":45145,"body":45146,"category":45206,"date":45207,"description":45208,"extension":617,"link":45209,"meta":45210,"navigation":499,"path":45211,"seo":45212,"slug":45150,"stem":45214,"tags":45215,"teaser":45217,"__hash__":45218},"blog/blog/solr-as-search-engine-for-opencms.md","Solr as search engine for OpenCms",[42564],{"type":11,"value":45147,"toc":45204},[45148,45151,45166,45180,45183,45192,45201],[14,45149,45144],{"id":45150},"solr-as-search-engine-for-opencms",[18,45152,45153,45154,45159,45160,45165],{},"Matching the time of my talk at this years ",[585,45155,45158],{"href":45156,"rel":45157},"http://www.opencms-days.org/en/index.html",[589],"OpenCms Days"," we released our\nmodule for integrating ",[585,45161,45164],{"href":45162,"rel":45163},"https://github.com/synyx/opencms-solr-module",[589],"Solr with OpenCms",". A few days have passed now and\nwe had the time to polish the documentation and some aspects of the module.",[18,45167,45168,45173,45174,45179],{},[585,45169,45172],{"href":45170,"rel":45171},"http://lucene.apache.org/solr/",[589],"Solr"," is a search server that is based on the de facto standard for indexing in\nJava, ",[585,45175,45178],{"href":45176,"rel":45177},"http://lucene.apache.org/",[589],"Apache Lucene",". It provides an abstraction layer above the low level details of\nindexing and adds some useful features like facetting and synonyms.",[18,45181,45182],{},"Solr is integrated transparently as an OpenCms index and can be used and mixed with common Lucene indexes. 
Communication\nis done via HTTP, Solr is accessed using a REST based interface.",[18,45184,45185,45186,45191],{},"We provide two ways to try the module: You can\neither ",[585,45187,45190],{"href":45188,"rel":45189},"https://github.com/Synyx/opencms-solr-module",[589],"checkout an example application"," that you can start up immediately\nusing the OpenCms demo application TemplateTwo. This is the best place if you want to play with some configuration\noptions and just see how all of it works.",[18,45193,45194,45195,45200],{},"Another way is\nthe ",[585,45196,45199],{"href":45197,"rel":45198},"https://github.com/synyx/opencms-solr-module/wiki/Integrating-Solr-into-an-existing-application",[589],"integration in an existing application which is also described in detail",".\nThis is what you would do if you really want to use it in production.",[18,45202,45203],{},"Please feel free to post any questions or feature requests to the issue tracker.",{"title":48,"searchDepth":86,"depth":86,"links":45205},[],[613,996],"2011-05-26T15:01:54","Matching the time of my talk at this years OpenCms Days we released our\\nmodule for integrating Solr with OpenCms. A few days have passed now and\\nwe had the time to polish the documentation and some aspects of the module.","https://synyx.de/blog/solr-as-search-engine-for-opencms/",{},"/blog/solr-as-search-engine-for-opencms",{"title":45144,"description":45213},"Matching the time of my talk at this years OpenCms Days we released our\nmodule for integrating Solr with OpenCms. A few days have passed now and\nwe had the time to polish the documentation and some aspects of the module.","blog/solr-as-search-engine-for-opencms",[41377,40376,45216],"solr","Matching the time of my talk at this years OpenCms Days we released our module for integrating Solr with OpenCms. A few days have passed now and we had the…","B1O9VoFEtSOcrvIZm-OgWICsh3-nJM9l4WPhJ44dmW8",{"id":45220,"title":45221,"author":45222,"body":45223,"category":45242,"date":45243,"description":45244,"extension":617,"link":45245,"meta":45246,"navigation":499,"path":45247,"seo":45248,"slug":45249,"stem":45250,"tags":45251,"teaser":45257,"__hash__":45258},"blog/blog/opensource-is-not-just-about-the-license.md","Being Open Source instead of just Open-Sourcing or Open Source is not just about the license",[41052],{"type":11,"value":45224,"toc":45240},[45225,45228,45231,45234],[14,45226,45221],{"id":45227},"being-open-source-instead-of-just-open-sourcing-or-open-source-is-not-just-about-the-license",[18,45229,45230],{},"Open Source is not just about available sources or certain licenses. Successful Open Source projects have a community\nthat matters, not just users, strong leaders that listen and still communicate their vision and goal, growing base of\ncontributors who don’t want to be ignored, even if there’s a benevolent dictator as project lead.",[18,45232,45233],{},"There are some popular Open Source projects which recently failed in some of these areas and got forked. Synyx decided\nto go with the forks even though we are mainly users and in these cases not main contributors. What’s the reason behind\nthese decisions? Our company vision tells us to live Open Source which includes much more than what it looks on its\nsurface. It’s not a goal in itself. We believe openness, communication and trancparency are key to quality software,\nespecially long term. 
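Because the module talks to Solr purely over HTTP, any client can query the index directly. A minimal sketch using only JDK classes, assuming a locally running Solr with the default select handler; the URL, core location and query field are placeholders:

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLEncoder;

public class SolrHttpQuery {

    public static void main(String[] args) throws Exception {
        // Hypothetical Solr base URL; adjust host, port and path to your setup.
        String baseUrl = "http://localhost:8983/solr/select";
        String query = URLEncoder.encode("content:opencms", "UTF-8");
        URL url = new URL(baseUrl + "?q=" + query + "&wt=json");

        // Read and print the raw response returned by Solr's REST interface.
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(url.openStream(), "UTF-8"));
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
        }
        reader.close();
    }
}
```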
That leads to more deliverable value and improved competitiveness as well as more transparency to\nthe community behind.",[18,45235,45236],{},[2223,45237],{"alt":48,"src":45238,"title":45239},"https://media.synyx.de/uploads//2011/05/chili-300x199.jpg","chili",{"title":48,"searchDepth":86,"depth":86,"links":45241},[],[613,996],"2011-05-20T17:01:44","Open Source is not just about available sources or certain licenses. Successful Open Source projects have a community\\nthat matters, not just users, strong leaders that listen and still communicate their vision and goal, growing base of\\ncontributors who don’t want to be ignored, even if there’s a benevolent dictator as project lead.","https://synyx.de/blog/opensource-is-not-just-about-the-license/",{},"/blog/opensource-is-not-just-about-the-license",{"title":45221,"description":45230},"opensource-is-not-just-about-the-license","blog/opensource-is-not-just-about-the-license",[45252,4837,45253,21729,45254,41377,45255,45256],"chiliproject","hudson","libreoffice","redmine","transparency","Open Source is not just about available sources or certain licenses. Successful Open Source projects have a community that matters, not just users, strong leaders that listen and still communicate…","CCudIs0-B2Q3HB1MJjr6jXo1c__MAyAy4luOsfGU9SQ",{"id":45260,"title":45261,"author":45262,"body":45263,"category":45327,"date":45328,"description":45329,"extension":617,"link":45330,"meta":45331,"navigation":499,"path":45332,"seo":45333,"slug":45267,"stem":45335,"tags":45336,"teaser":45337,"__hash__":45338},"blog/blog/maven-and-opencms.md","Maven and OpenCms",[42564],{"type":11,"value":45264,"toc":45325},[45265,45268,45303,45306],[14,45266,45261],{"id":45267},"maven-and-opencms",[18,45269,45270,45271,45276,45277,45281,45282,45287,45288,45293,45294,45297,45298,986],{},"I ",[585,45272,45275],{"href":45273,"rel":45274},"http://blog.synyx.de/2010/11/netbeans-and-opencms/",[589],"previously mentioned"," that setting up a development environment\nfor ",[585,45278,41231],{"href":45279,"rel":45280},"http://opencms.org/",[589]," can be quite hard. Besides\nour ",[585,45283,45286],{"href":45284,"rel":45285},"https://github.com/synyx/opencms-netbeans-module",[589],"Netbeans module"," we are using a custom maven plugin for some time\nnow. As we gain a lot of benefit by building our modules from the file system it’s time to release it and see if other\npeople also want to use it. It’s based on an Ant task that has originally been released\nby ",[585,45289,45292],{"href":45290,"rel":45291},"http://www.eurelis.com",[589],"Eurelis",". Today we ",[27,45295,45296],{},"released version 1.0"," which is now available under the terms of\nthe ",[585,45299,45302],{"href":45300,"rel":45301},"http://www.fsf.org/licensing/licenses/lgpl.txt",[589],"GNU Lesser General Public License",[18,45304,45305],{},"The plugin builds OpenCms module zip files from a Maven directory structure and can import these automatically in a\nrunning instance. You don’t have to edit any files in the OpenCms workplace anymore, the local filesystem is the basis\nfor your module. Use the archetype to set up a complete installation of OpenCms 7.5.2 or 7.5.4 in no time.",[18,45307,45308,45309,45313,45314,45319,45320,986],{},"You can find the projects homepage including documentation how to get started and how everything\nworks ",[585,45310,10819],{"href":45311,"rel":45312},"https://github.com/synyx/maven-opencms/",[589],". 
You should not get problems to get up and running after reading\nthe information from the projects ",[585,45315,45318],{"href":45316,"rel":45317},"https://github.com/synyx/maven-opencms/wiki",[589],"Wiki",". If you’re having any trouble or\nfeature request feel free to contact us or create a ticket in the\nprojects ",[585,45321,45324],{"href":45322,"rel":45323},"https://github.com/synyx/maven-opencms/issues",[589],"issue tracker",{"title":48,"searchDepth":86,"depth":86,"links":45326},[],[613,996],"2011-04-08T17:39:59","I previously mentioned that setting up a development environment\\nfor OpenCms can be quite hard. Besides\\nour Netbeans module we are using a custom maven plugin for some time\\nnow. As we gain a lot of benefit by building our modules from the file system it’s time to release it and see if other\\npeople also want to use it. It’s based on an Ant task that has originally been released\\nby Eurelis. Today we released version 1.0 which is now available under the terms of\\nthe GNU Lesser General Public License.","https://synyx.de/blog/maven-and-opencms/",{},"/blog/maven-and-opencms",{"title":45261,"description":45334},"I previously mentioned that setting up a development environment\nfor OpenCms can be quite hard. Besides\nour Netbeans module we are using a custom maven plugin for some time\nnow. As we gain a lot of benefit by building our modules from the file system it’s time to release it and see if other\npeople also want to use it. It’s based on an Ant task that has originally been released\nby Eurelis. Today we released version 1.0 which is now available under the terms of\nthe GNU Lesser General Public License.","blog/maven-and-opencms",[10891,40376],"I previously mentioned that setting up a development environment for OpenCms can be quite hard. Besides our Netbeans module we are using a custom maven plugin for some time now.…","xHLhHj4ODShymebsyF1zyo6xejQxvqb3ILdPFOcloUA",{"id":45340,"title":45341,"author":45342,"body":45343,"category":45786,"date":45787,"description":45350,"extension":617,"link":45788,"meta":45789,"navigation":499,"path":45790,"seo":45791,"slug":45792,"stem":45793,"tags":45794,"teaser":45803,"__hash__":45804},"blog/blog/utilizing-git-to-dive-into-huge-code-bases.md","Utilizing Git to dive into huge code bases – Git SVN Tips",[41052],{"type":11,"value":45344,"toc":45784},[45345,45348,45351,45354,45357,45371,45374,45377,45380,45383,45386,45389,45392,45395,45398,45401,45404,45407,45410,45413,45416,45419,45422,45436,45466,45469,45472,45475,45489,45491,45505,45508,45511,45537,45540,45583,45586,45589,45592,45611,45639,45642,45645,45648,45651,45675,45678,45692,45719,45722,45725,45728,45731,45754,45762,45776,45779,45782],[14,45346,45341],{"id":45347},"utilizing-git-to-dive-into-huge-code-bases-git-svn-tips",[18,45349,45350],{},"Unfortunately there are still projects not on dvsc like git. That’s especially true",[18,45352,45353],{},"for enterprise customers which are at least stuck on Subversion if not worse.",[18,45355,45356],{},"So the first thing I do on new projects I join:",[43,45358,45360],{"className":13786,"code":45359,"language":13788,"meta":48,"style":48},"\n git svn clone -s svn-url\n\n",[50,45361,45362,45366],{"__ignoreMap":48},[53,45363,45364],{"class":55,"line":56},[53,45365,500],{"emptyLinePlaceholder":499},[53,45367,45368],{"class":55,"line":86},[53,45369,45370],{}," git svn clone -s svn-url\n",[18,45372,45373],{},"Or installing Git if I have to work on customer provided machines. 
That’s even more",[18,45375,45376],{},"important than the rest of a development environment like an IDE.",[18,45378,45379],{},"From experience I find it especially useful to experiment with new code basis",[18,45381,45382],{},"utilizing Git. Grown and big projects aren’t easy to understand architecturally and",[18,45384,45385],{},"implementation wise without digging deep. With the help of Git you can jump right",[18,45387,45388],{},"in, without fear and without messing everything up or having too much unrevertable",[18,45390,45391],{},"local changes. Just commit early and often! By doing it locally, in experimental",[18,45393,45394],{},"branches you can try and learn. Before you publish something to a wider audience",[18,45396,45397],{},"(svn) you can reorder, cherrypick and change everything or parts of it. Git is my",[18,45399,45400],{},"tool of choice to get my hands dirty with legacy code (new one too of course).",[18,45402,45403],{},"Some useful tips on how I use Git-SVN:",[18,45405,45406],{},"SVN history is linear, so you can’t use branches and merge the usual git-way without",[18,45408,45409],{},"thinking.",[18,45411,45412],{},"What often happens to me is that I implement a new feature, do some refactorings on",[18,45414,45415],{},"my way etc and an urgant bug report comes along. But I commited on master, don’t",[18,45417,45418],{},"want to push it to SVN yet since it’s not finished yet and might not be stable. What",[18,45420,45421],{},"to do? git svn dcommit would push all my local master commits to svn. The solution:",[43,45423,45425],{"className":13786,"code":45424,"language":13788,"meta":48,"style":48},"\n git branch featureA\n\n",[50,45426,45427,45431],{"__ignoreMap":48},[53,45428,45429],{"class":55,"line":56},[53,45430,500],{"emptyLinePlaceholder":499},[53,45432,45433],{"class":55,"line":86},[53,45434,45435],{}," git branch featureA\n",[43,45437,45439],{"className":13786,"code":45438,"language":13788,"meta":48,"style":48},"\u003Ctt>\n |svn | master\n ---o---o---o---o---o---o---o---o---o---o\n | featureA\n\u003C/tt>\n",[50,45440,45441,45446,45451,45456,45461],{"__ignoreMap":48},[53,45442,45443],{"class":55,"line":56},[53,45444,45445],{},"\u003Ctt>\n",[53,45447,45448],{"class":55,"line":86},[53,45449,45450],{}," |svn | master\n",[53,45452,45453],{"class":55,"line":126},[53,45454,45455],{}," ---o---o---o---o---o---o---o---o---o---o\n",[53,45457,45458],{"class":55,"line":163},[53,45459,45460],{}," | featureA\n",[53,45462,45463],{"class":55,"line":186},[53,45464,45465],{},"\u003C/tt>\n",[18,45467,45468],{},"Now both branches featureA and master point to the latest commit. But we want master",[18,45470,45471],{},"to point to an earlier commit. 
Let’s say the last 10 commits aren’t in SVN yet and",[18,45473,45474],{},"the last 8 are experimental, so 2 could be pushed.",[43,45476,45478],{"className":13786,"code":45477,"language":13788,"meta":48,"style":48},"\n git reset --hard HEAD~8\n\n",[50,45479,45480,45484],{"__ignoreMap":48},[53,45481,45482],{"class":55,"line":56},[53,45483,500],{"emptyLinePlaceholder":499},[53,45485,45486],{"class":55,"line":86},[53,45487,45488],{}," git reset --hard HEAD~8\n",[18,45490,41343],{},[43,45492,45494],{"className":13786,"code":45493,"language":13788,"meta":48,"style":48},"\n git reset --hard sha-hash-of-commit-to-point-to\n\n",[50,45495,45496,45500],{"__ignoreMap":48},[53,45497,45498],{"class":55,"line":56},[53,45499,500],{"emptyLinePlaceholder":499},[53,45501,45502],{"class":55,"line":86},[53,45503,45504],{}," git reset --hard sha-hash-of-commit-to-point-to\n",[18,45506,45507],{},"Now my master is in the state it was in 8 commits ago and my experimental changes",[18,45509,45510],{},"are still in featureA branch.",[43,45512,45514],{"className":13786,"code":45513,"language":13788,"meta":48,"style":48},"\u003Ctt>\n |svn | master\n ---o---o---o---o---o---o---o---o---o---o\n | featureA\n\u003C/tt>\n",[50,45515,45516,45520,45525,45529,45533],{"__ignoreMap":48},[53,45517,45518],{"class":55,"line":56},[53,45519,45445],{},[53,45521,45522],{"class":55,"line":86},[53,45523,45524],{}," |svn | master\n",[53,45526,45527],{"class":55,"line":126},[53,45528,45455],{},[53,45530,45531],{"class":55,"line":163},[53,45532,45460],{},[53,45534,45535],{"class":55,"line":186},[53,45536,45465],{},[18,45538,45539],{},"I can continue with fixing that critical bug, commit and svn dcommit. Have a look on how your history look with gitk\n–all.",[43,45541,45543],{"className":13786,"code":45542,"language":13788,"meta":48,"style":48},"\u003Ctt>\n | svn\n ---o---o---o master\n /\n ---o---o---o\n \\---o---o---o---o---o---o---o\n | featureA\n\u003C/tt>\n",[50,45544,45545,45549,45554,45559,45564,45569,45574,45579],{"__ignoreMap":48},[53,45546,45547],{"class":55,"line":56},[53,45548,45445],{},[53,45550,45551],{"class":55,"line":86},[53,45552,45553],{}," | svn\n",[53,45555,45556],{"class":55,"line":126},[53,45557,45558],{}," ---o---o---o master\n",[53,45560,45561],{"class":55,"line":163},[53,45562,45563],{}," /\n",[53,45565,45566],{"class":55,"line":186},[53,45567,45568],{}," ---o---o---o\n",[53,45570,45571],{"class":55,"line":221},[53,45572,45573],{}," \\---o---o---o---o---o---o---o\n",[53,45575,45576],{"class":55,"line":242},[53,45577,45578],{}," | featureA\n",[53,45580,45581],{"class":55,"line":273},[53,45582,45465],{},[18,45584,45585],{},"Dcommit rebased 3 commits and especially if",[18,45587,45588],{},"there were some more upstream svn commits, I want to base my experimental stuff",[18,45590,45591],{},"ontop of this. 
So I do a",[43,45593,45595],{"className":13786,"code":45594,"language":13788,"meta":48,"style":48},"\n git checkout featureA\n git rebase master\n\n",[50,45596,45597,45601,45606],{"__ignoreMap":48},[53,45598,45599],{"class":55,"line":56},[53,45600,500],{"emptyLinePlaceholder":499},[53,45602,45603],{"class":55,"line":86},[53,45604,45605],{}," git checkout featureA\n",[53,45607,45608],{"class":55,"line":126},[53,45609,45610],{}," git rebase master\n",[43,45612,45614],{"className":13786,"code":45613,"language":13788,"meta":48,"style":48},"\u003Ctt>\n | master\n ---o---o---o---o---o---o---o---o---o---o---o\n | svn | featureA\n\u003C/tt>\n",[50,45615,45616,45620,45625,45630,45635],{"__ignoreMap":48},[53,45617,45618],{"class":55,"line":56},[53,45619,45445],{},[53,45621,45622],{"class":55,"line":86},[53,45623,45624],{}," | master\n",[53,45626,45627],{"class":55,"line":126},[53,45628,45629],{}," ---o---o---o---o---o---o---o---o---o---o---o\n",[53,45631,45632],{"class":55,"line":163},[53,45633,45634],{}," | svn | featureA\n",[53,45636,45637],{"class":55,"line":186},[53,45638,45465],{},[18,45640,45641],{},"Even if I could live without the upstream changes on my featureA branch for now, I’d",[18,45643,45644],{},"need a rebase later anyway, so I can do it in advance. That’s because the history",[18,45646,45647],{},"wouldn’t be linear anymore by doing a three-way merge of my featureA into master without rebasing.",[18,45649,45650],{},"When I’m satisfied and with featureA and nothing changed in master I can",[43,45652,45654],{"className":13786,"code":45653,"language":13788,"meta":48,"style":48},"\n git checkout master\n git merge featureA\n git branch -d featureA\n\n",[50,45655,45656,45660,45665,45670],{"__ignoreMap":48},[53,45657,45658],{"class":55,"line":56},[53,45659,500],{"emptyLinePlaceholder":499},[53,45661,45662],{"class":55,"line":86},[53,45663,45664],{}," git checkout master\n",[53,45666,45667],{"class":55,"line":126},[53,45668,45669],{}," git merge featureA\n",[53,45671,45672],{"class":55,"line":163},[53,45673,45674],{}," git branch -d featureA\n",[18,45676,45677],{},"And since it’s a fast-forward merge can continue to push it to SVN",[43,45679,45681],{"className":13786,"code":45680,"language":13788,"meta":48,"style":48},"\n git svn dcommit\n\n",[50,45682,45683,45687],{"__ignoreMap":48},[53,45684,45685],{"class":55,"line":56},[53,45686,500],{"emptyLinePlaceholder":499},[53,45688,45689],{"class":55,"line":86},[53,45690,45691],{}," git svn dcommit\n",[43,45693,45695],{"className":13786,"code":45694,"language":13788,"meta":48,"style":48},"\u003Ctt>\n | master\n ---o---o---o---o---o---o---o---o---o---o---o\n | svn\n\u003C/tt>\n",[50,45696,45697,45701,45706,45710,45715],{"__ignoreMap":48},[53,45698,45699],{"class":55,"line":56},[53,45700,45445],{},[53,45702,45703],{"class":55,"line":86},[53,45704,45705],{}," | master\n",[53,45707,45708],{"class":55,"line":126},[53,45709,45629],{},[53,45711,45712],{"class":55,"line":163},[53,45713,45714],{}," | svn\n",[53,45716,45717],{"class":55,"line":186},[53,45718,45465],{},[18,45720,45721],{},"If something did change in master I just do another rebase before the merge.",[18,45723,45724],{},"If I come to the conclusion that my experimental branch was just for learning",[18,45726,45727],{},"purpose and only one or two useful refactoring or unit-test improving commits I take",[18,45729,45730],{},"only these to master and abandon the branch.",[43,45732,45734],{"className":13786,"code":45733,"language":13788,"meta":48,"style":48},"\n git checkout master\n git 
cherry-pick sha-of-one-commit\n git cherry-pick sha-of-another\n\n",[50,45735,45736,45740,45744,45749],{"__ignoreMap":48},[53,45737,45738],{"class":55,"line":56},[53,45739,500],{"emptyLinePlaceholder":499},[53,45741,45742],{"class":55,"line":86},[53,45743,45664],{},[53,45745,45746],{"class":55,"line":126},[53,45747,45748],{}," git cherry-pick sha-of-one-commit\n",[53,45750,45751],{"class":55,"line":163},[53,45752,45753],{}," git cherry-pick sha-of-another\n",[18,45755,45756,45757],{},"If I’m overall satisfied with the results of my experimental branch, but not with commit messages, how the commits are\nordered and maybe their scope, I use git\nrebases ",[585,45758,45761],{"href":45759,"rel":45760},"http://www.kernel.org/pub/software/scm/git/docs/git-rebase.html#_interactive_mode",[589],"interactive mode",[43,45763,45765],{"className":13786,"code":45764,"language":13788,"meta":48,"style":48},"\n git rebase -i sha-after-this-commit\n\n",[50,45766,45767,45771],{"__ignoreMap":48},[53,45768,45769],{"class":55,"line":56},[53,45770,500],{"emptyLinePlaceholder":499},[53,45772,45773],{"class":55,"line":86},[53,45774,45775],{}," git rebase -i sha-after-this-commit\n",[18,45777,45778],{},"reordering commits, splitting commits, editing commit messages, squashing multiple commits together.",[18,45780,45781],{},"There are endless more possibilities to get a better grip on your code-base.",[607,45783,989],{},{"title":48,"searchDepth":86,"depth":86,"links":45785},[],[613],"2011-03-21T06:22:53","https://synyx.de/blog/utilizing-git-to-dive-into-huge-code-bases/",{},"/blog/utilizing-git-to-dive-into-huge-code-bases",{"title":45341,"description":45350},"utilizing-git-to-dive-into-huge-code-bases","blog/utilizing-git-to-dive-into-huge-code-bases",[45795,8207,45796,6884,45797,45798,45799,45800,45801,45802],"dvcs","productivity","subversion","svn","tips","vcs","versioncontrol","withoutfear","Unfortunately there are still projects not on dvsc like git. That’s especially true for enterprise customers which are at least stuck on Subversion if not worse. So the first thing…","jY0wlPCOmzUfKbhiV6O6parFyGSU5NhgtbhpJnpQyIQ",{"id":45806,"title":45807,"author":45808,"body":45809,"category":45859,"date":45860,"description":45861,"extension":617,"link":45862,"meta":45863,"navigation":499,"path":45864,"seo":45865,"slug":45813,"stem":45867,"tags":45868,"teaser":45869,"__hash__":45870},"blog/blog/solr-summit-frankfurt.md","Solr Summit Frankfurt",[42564],{"type":11,"value":45810,"toc":45857},[45811,45814,45827,45836,45845,45854],[14,45812,45807],{"id":45813},"solr-summit-frankfurt",[18,45815,45816,45817,45822,45823,45826],{},"I just returned\nfrom ",[585,45818,45821],{"href":45819,"rel":45820},"http://www.lucidimagination.com/blog/2011/01/31/solr-summit-series-comes-to-germany-munich23-feb-frankfurt24-feb-2011-2/",[589],"Solr Summit","\nin Frankfurt, a half day mini conference about ",[585,45824,45172],{"href":45170,"rel":45825},[589],", the search server based on Apache\nLucene. It has been a really worthwile event with a lot of insight into large scale implementations of Solr.",[18,45828,45829,45830,45835],{},"The first half of the conference Marc Krellenstein, a Co-Founder\nof ",[585,45831,45834],{"href":45832,"rel":45833},"http://www.lucidimagination.com/",[589],"Lucid Imagination",", presented trends in enterprise search as well as Lucids\ncommercial Solr environment Lucid Works Enterprise. After an outline of the history of search systems he presented\ndifferent characteristics of a successful search system. 
Though being held by someone who is obviously biased towards\nSolr and Lucene he also summarized where commercial search systems like Autonomy and Fast have their strengths, good to\nhave an insight into competing systems.",[18,45837,45838,45839,45844],{},"Afterwards Oliver Schönherr and Thomas Kwiatkowski spoke on how Solr is used\nat ",[585,45840,45843],{"href":45841,"rel":45842},"http://www.immobilienscout24.de",[589],"Immobilienscout24",", where it powers full text search. Solr had been selected after\nan evaluation period where commercial as well as non-commercial systems were compared. The way Solr is used probably is\nnot a common use case. IS24 uses a custom build search system for doing their structured search, where you basically\nrefine the search results using different form fields. Solr is used to search within this result list by intersecting\nthe Solr search results with the results of the legacy system. They are using a plain Solr 1.4 without any patches and\nonly two additional components, a scheduler for the data import handler that indexes a database and a component that\nprovides fast access to only the ids of documents because that’s all that is needed for the intersection.",[18,45846,45847,45848,45853],{},"The last talk was held by Olaf Zschiedrich of ",[585,45849,45852],{"href":45850,"rel":45851},"https://www.ebay-kleinanzeigen.de/",[589],"eBay Kleinanzeigen",", formerly known\nas Kijiji. eBay Kleinanzeigen seems to use nearly all features that Solr has to offer, most notably facetting,\nautocompletion and more-like-this for displaying related articles. The site is being developed by a relatively small\nteam and seems to be blazing fast though there are lots of hits on Solr, on peak times 1500 requests/s. Of course this\nis only possible as Solr is designed to be scalable by means of its replication features, its internal caching and the\nexternal caching support through ETags. At eBay Kleinanzeigen there are 12 Solr instances that are used for searching\nbut according to Olaf, 8 would still be enough to keep the resource consumption under 50%.",[18,45855,45856],{},"All of the talks were really interesting, Olaf Zschiedrichs being the one with the most laughters. I have learned a lot\nand appreciate the time and costs Lucid Imagination and its partners have invested to make this event possible.",{"title":48,"searchDepth":86,"depth":86,"links":45858},[],[613,996],"2011-02-24T20:34:30","I just returned\\nfrom Solr Summit\\nin Frankfurt, a half day mini conference about Solr, the search server based on Apache\\nLucene. It has been a really worthwile event with a lot of insight into large scale implementations of Solr.","https://synyx.de/blog/solr-summit-frankfurt/",{},"/blog/solr-summit-frankfurt",{"title":45807,"description":45866},"I just returned\nfrom Solr Summit\nin Frankfurt, a half day mini conference about Solr, the search server based on Apache\nLucene. It has been a really worthwile event with a lot of insight into large scale implementations of Solr.","blog/solr-summit-frankfurt",[7721,45216],"I just returned from Solr Summit in Frankfurt, a half day mini conference about Solr, the search server based on Apache Lucene. 
It has been a really worthwile event with…","kPs7DFsBho3bz4Mub2Pfnm_Wm3cs2gZQU5AYzAkVY8Y",{"id":45872,"title":45873,"author":45874,"body":45875,"category":45930,"date":45931,"description":45932,"extension":617,"link":45933,"meta":45934,"navigation":499,"path":45935,"seo":45936,"slug":45879,"stem":45938,"tags":45939,"teaser":45943,"__hash__":45944},"blog/blog/synyx-messagesource-load-your-i18n-messages-from-database.md","Synyx MessageSource: Load your i18n messages from database",[12981],{"type":11,"value":45876,"toc":45928},[45877,45880,45888,45895,45910],[14,45878,45873],{"id":45879},"synyx-messagesource-load-your-i18n-messages-from-database",[18,45881,45882,45883,45887],{},"A while ago we wanted to store internationalisation for a project in database to allow (a subset of) users to create and\nupdate internationalisation using the application itself. When we searched the web for existing projects that allow to\ndo this we did not find a good and ready to use solution. This is why we decided to write the code ourselves and make it\navailable to others, especially since there was some public demand and it will probably not be shipped by SpringSource (\ncheck out ",[585,45884,34138],{"href":45885,"rel":45886},"http://www.google.de/search?q=spring+i18n+database",[589]," for details).",[18,45889,45890,45891,45894],{},"So today I’d like to announce our new Open Source project ",[27,45892,45893],{},"Synyx Messagesource for Spring"," business-friendly\npublished using Apache License, Version 2.0.",[18,45896,45897,45898,45901,45902,45905,45906,45909],{},"When you want to store internationalisation of your Spring-backed application in database, the project is the right\nthing to use. It provides an implementation of Springs ",[50,45899,45900],{},"MessageSource"," interface that is able to load and cache a set of\nmessages at once using a ",[50,45903,45904],{},"MessageProvider",". The project brings a configurable one that is able to read (and write) your\ni18n to database using JDBC. There is also support to import and export your messages to the “standard” i18n\n",[50,45907,45908],{},".properties"," files.",[18,45911,45308,45912,45916,45917,45922,45923,986],{},[585,45913,10819],{"href":45914,"rel":45915},"http://messagesource.synyx.org",[589],". You should not get problems to get up and running after reading the\ninformation from the ",[585,45918,45921],{"href":45919,"rel":45920},"https://github.com/synyx/messagesource/wiki",[589],"projects Wiki",". If you’re having any trouble or\nfeature request feel free to contact us\nor ",[585,45924,45927],{"href":45925,"rel":45926},"https://github.com/synyx/messagesource/issues",[589],"create a ticket in the projects issue tracker",{"title":48,"searchDepth":86,"depth":86,"links":45929},[],[613,996],"2011-02-14T18:06:55","A while ago we wanted to store internationalisation for a project in database to allow (a subset of) users to create and\\nupdate internationalisation using the application itself. When we searched the web for existing projects that allow to\\ndo this we did not find a good and ready to use solution. 
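Because the implementation plugs into Spring's standard MessageSource interface, calling code stays the same no matter where the messages are stored. A minimal sketch of that consumer side, using Spring's ResourceBundleMessageSource as a stand-in for the database-backed implementation; the basename and the message key are placeholders:

```java
import java.util.Locale;

import org.springframework.context.MessageSource;
import org.springframework.context.support.ResourceBundleMessageSource;

public class MessageSourceUsage {

    public static void main(String[] args) {
        // Stand-in MessageSource; in the scenario described above this would be the
        // database-backed implementation configured as a Spring bean instead.
        ResourceBundleMessageSource bundleSource = new ResourceBundleMessageSource();
        bundleSource.setBasename("messages"); // expects messages*.properties on the classpath

        // Client code only depends on the MessageSource interface, so swapping the
        // backing store (properties files vs. database) does not affect callers.
        MessageSource messageSource = bundleSource;
        String text = messageSource.getMessage(
                "greeting", new Object[]{"World"}, "Hello, {0}!", Locale.ENGLISH);
        System.out.println(text);
    }
}
```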
This is why we decided to write the code ourselves and make it\\navailable to others, especially since there was some public demand and it will probably not be shipped by SpringSource (\\ncheck out Google for details).","https://synyx.de/blog/synyx-messagesource-load-your-i18n-messages-from-database/",{},"/blog/synyx-messagesource-load-your-i18n-messages-from-database",{"title":45873,"description":45937},"A while ago we wanted to store internationalisation for a project in database to allow (a subset of) users to create and\nupdate internationalisation using the application itself. When we searched the web for existing projects that allow to\ndo this we did not find a good and ready to use solution. This is why we decided to write the code ourselves and make it\navailable to others, especially since there was some public demand and it will probably not be shipped by SpringSource (\ncheck out Google for details).","blog/synyx-messagesource-load-your-i18n-messages-from-database",[30643,45940,45941,45942,41377,1010],"i18n","internationalisation","internationalization","A while ago we wanted to store internationalisation for a project in database to allow (a subset of) users to create and update internationalisation using the application itself. When we…","nyZcMPq1I7-esYkZoVwAgntWZKJrIJkCdwQvpagdS20",{"id":45946,"title":45947,"author":45948,"body":45949,"category":46031,"date":46032,"description":46033,"extension":617,"link":46034,"meta":46035,"navigation":499,"path":46036,"seo":46037,"slug":45953,"stem":46039,"tags":46040,"teaser":46042,"__hash__":46043},"blog/blog/spring-ide-into-eclipse.md","Spring IDE into eclipse",[41600],{"type":11,"value":45950,"toc":46029},[45951,45954,45969,45984,45987,45990,45995,46010,46013,46016],[14,45952,45947],{"id":45953},"spring-ide-into-eclipse",[18,45955,45956,45957,45962,45963,45968],{},"Today, I tried to install parts of the ",[585,45958,45961],{"href":45959,"rel":45960},"http://www.springsource.com/developer/sts",[589],"SpringSource Tool Suite","\ninto ",[585,45964,45967],{"href":45965,"rel":45966},"http://www.eclipse.org/",[589],"Eclipse"," Helios SR1 via update-site.",[18,45970,45971,45972,45978,45979,45983],{},"After finding the right update-site of the STS for version 3.6 of eclipse (namely: *\n",[573,45973,45974],{},[585,45975,45976],{"href":45976,"rel":45977},"http://dist.springsource.com/release/TOOLS/update/e3.6",[589],"* and * *",[585,45980,45981],{"href":45981,"rel":45982},"http://dist.springsource.com/release/TOOLS/composite/e3.6",[589],"**) and following the installation instructions carefully, I\nhad Spring IDE 2.5.2 successfully integrated.",[18,45985,45986],{},"But after restarting eclipse I was shocked, it seemed that the update-manager broke down…",[18,45988,45989],{},"when opening the update-manager the log gasped out:",[18,45991,1038,45992,1042],{},[573,45993,45994],{},"An internal error occurred during: Contacting Software Sites. java.lang.NullPointerException…",[18,45996,45997,45998,46003,46004,46009],{},"didn’t sound good. After quite a lot of researching and try and failing, I was very glad to find\na ",[585,45999,46002],{"href":46000,"rel":46001},"https://jira.springsource.org/browse/IDE-1163?page=com.atlassian.jira.plugin.system.issuetabpanels%3Acomment-tabpanel#issue-tabs",[589],"bug report",",\nwhich mentioned that this occurs on updating Spring IDE. 
Thanks\nto ",[585,46005,46008],{"href":46006,"rel":46007},"http://www.springsource.com/people/cdupuis",[589],"Christian Dupuis"," from SpringSource, who provided a workaround\nyesterday. For some reason a second org.apache.commons.httpclient_3.1.0….jar was shipped and confused eclipse. But\nafter commenting one of them out in “configuration/org.eclipse.equinox.simpleconfigurator/bundles.info”",[18,46011,46012],{},"everything was working well again.",[18,46014,46015],{},"Now I can enjoy my Spring IDE and perhaps you can so, too.",[18,46017,46018,46019,46022,46023,46028],{},"edit: in the meantime ",[585,46020,46008],{"href":46006,"rel":46021},[589]," already delivered\na ",[585,46024,46027],{"href":46025,"rel":46026},"http://dist.springsource.com/release/TOOLS/update/2.5.2.SR1/e3.6/springsource-tool-suite-2.5.2.RELEASE-e3.6-updatesite.zip",[589],"quickly fixed update-archive","\nwhich makes the file manipulating superfluous… Hell these guys are quite fast!",{"title":48,"searchDepth":86,"depth":86,"links":46030},[],[613],"2011-01-12T18:08:02","Today, I tried to install parts of the SpringSource Tool Suite\\ninto Eclipse Helios SR1 via update-site.","https://synyx.de/blog/spring-ide-into-eclipse/",{},"/blog/spring-ide-into-eclipse",{"title":45947,"description":46038},"Today, I tried to install parts of the SpringSource Tool Suite\ninto Eclipse Helios SR1 via update-site.","blog/spring-ide-into-eclipse",[14573,46041,1010],"eclipse","Today, I tried to install parts of the SpringSource Tool Suite into Eclipse Helios SR1 via update-site. After finding the right update-site of the STS for version 3.6 of eclipse…","pYIVv5Z_1S__iGMQhek2t48OhdaNC3Lw0q-CD9EVE-M",{"id":46045,"title":46046,"author":46047,"body":46048,"category":46919,"date":46920,"description":46921,"extension":617,"link":46922,"meta":46923,"navigation":499,"path":46924,"seo":46925,"slug":46052,"stem":46927,"tags":46928,"teaser":46929,"__hash__":46930},"blog/blog/scripting-opencms.md","Scripting OpenCms",[42564],{"type":11,"value":46049,"toc":46917},[46050,46053,46066,46073,46085,46435,46438,46540,46543,46566,46569,46592,46595,46600,46603,46911,46914],[14,46051,46046],{"id":46052},"scripting-opencms",[18,46054,46055,46059,46060,46065],{},[585,46056,41231],{"href":46057,"rel":46058},"http://opencms.org/de/",[589]," ships with a shell script for accessing the virtual file system from the command line.\nThis is useful for a lot of administrative tasks like importing modules or exporting content. You can supply scripts\nthat issue commands or use the shell interactively. As the syntax for the scripts is quite limited some tasks can’t be\nachieved that easily. ",[585,46061,46064],{"href":46062,"rel":46063},"http://blog.codecentric.de/en/2010/04/how-to-automate-opencms-module-import/",[589],"This blogpost","\ndescribes a way to circumvent these problems by generating the script files.",[18,46067,46068,46069,46072],{},"I will present a simple extension of this mechanism that makes it possible to access the VFS using\nthe ",[585,46070,21472],{"href":41557,"rel":46071},[589]," scripting language that also runs on the Java Virtual Machine. 
It can be useful\nto import users or content from different systems, create content for test runs or any other task you can think of.",[18,46074,46075,46076,46079,46080,46084],{},"The original script file for accessing the cms shell is located in ",[50,46077,46078],{},"WEB-INF/cmsshell.sh"," (\nSee ",[585,46081,10819],{"href":46082,"rel":46083},"http://blog.synyx.de/2010/11/using-cmsshell-on-ubuntu-systems/",[589]," on how to make it work on Ubuntu systems).\nThe following file is a modification of the original file that doesn’t call the underlying Java class but another groovy\nscript and should be placed next to the original:",[43,46086,46088],{"className":30754,"code":46087,"language":30756,"meta":48,"style":48},"\n#!/bin/sh\n# Script for evaluating groovy scripts with access to opencms.\n#\n# Please make sure that \"servlet-api.jar\" and \"jsp-api.jar\" are found.\n#\n# get path to opencms base directory\nOPENCMS_BASE=`pwd`\n# get path to tomcat home\nif [ -z \"$TOMCAT_HOME\" ]; then\n [ -n \"$CATALINA_HOME\" ] && TOMCAT_HOME=\"$CATALINA_HOME\"\n [ -z \"$TOMCAT_HOME\" ] && TOMCAT_HOME=\"$OPENCMS_BASE\"/../../..\nfi\nTOMCAT_CLASSPATH=\"\"\n# Support for tomcat 5\nfor JAR in ${TOMCAT_HOME}/common/lib/*.jar; do\n TOMCAT_CLASSPATH=\"${TOMCAT_CLASSPATH}:${JAR}\"\ndone\nfor JAR in ${TOMCAT_HOME}/shared/lib/*.jar; do\n TOMCAT_CLASSPATH=\"${TOMCAT_CLASSPATH}:${JAR}\"\ndone\n# Support for tomcat 6\nfor JAR in ${TOMCAT_HOME}/lib/*.jar; do\n TOMCAT_CLASSPATH=\"${TOMCAT_CLASSPATH}:${JAR}\"\ndone\nOPENCMS_CLASSPATH=\"\"\nfor JAR in ${OPENCMS_BASE}/lib/*.jar; do\n OPENCMS_CLASSPATH=\"${OPENCMS_CLASSPATH}:${JAR}\"\ndone\ngroovy -classpath \"${OPENCMS_CLASSPATH}:${TOMCAT_CLASSPATH}:classes\" evalscripts.groovy -base=\"${OPENCMS_BASE}\" \"$@\"\n\n",[50,46089,46090,46094,46098,46103,46107,46112,46116,46121,46136,46141,46160,46185,46209,46213,46223,46228,46247,46267,46271,46288,46304,46308,46313,46330,46346,46350,46359,46376,46393,46397],{"__ignoreMap":48},[53,46091,46092],{"class":55,"line":56},[53,46093,500],{"emptyLinePlaceholder":499},[53,46095,46096],{"class":55,"line":86},[53,46097,4325],{"class":3698},[53,46099,46100],{"class":55,"line":126},[53,46101,46102],{"class":3698},"# Script for evaluating groovy scripts with access to opencms.\n",[53,46104,46105],{"class":55,"line":163},[53,46106,31024],{"class":3698},[53,46108,46109],{"class":55,"line":186},[53,46110,46111],{"class":3698},"# Please make sure that \"servlet-api.jar\" and \"jsp-api.jar\" are found.\n",[53,46113,46114],{"class":55,"line":221},[53,46115,31024],{"class":3698},[53,46117,46118],{"class":55,"line":242},[53,46119,46120],{"class":3698},"# get path to opencms base directory\n",[53,46122,46123,46126,46128,46130,46133],{"class":55,"line":273},[53,46124,46125],{"class":82},"OPENCMS_BASE",[53,46127,390],{"class":389},[53,46129,36715],{"class":63},[53,46131,46132],{"class":89},"pwd",[53,46134,46135],{"class":63},"`\n",[53,46137,46138],{"class":55,"line":279},[53,46139,46140],{"class":3698},"# get path to tomcat home\n",[53,46142,46143,46145,46147,46149,46151,46154,46156,46158],{"class":55,"line":496},[53,46144,4334],{"class":389},[53,46146,4104],{"class":82},[53,46148,31285],{"class":389},[53,46150,6452],{"class":63},[53,46152,46153],{"class":82},"$TOMCAT_HOME",[53,46155,2385],{"class":63},[53,46157,31295],{"class":82},[53,46159,6467],{"class":389},[53,46161,46162,46165,46167,46169,46172,46174,46177,46179,46181,46183],{"class":55,"line":503},[53,46163,46164],{"class":82}," [ 
",[53,46166,39216],{"class":389},[53,46168,6452],{"class":63},[53,46170,46171],{"class":82},"$CATALINA_HOME",[53,46173,2385],{"class":63},[53,46175,46176],{"class":82}," ] && TOMCAT_HOME",[53,46178,390],{"class":389},[53,46180,2385],{"class":63},[53,46182,46171],{"class":82},[53,46184,31375],{"class":63},[53,46186,46187,46189,46191,46193,46195,46197,46199,46201,46203,46206],{"class":55,"line":509},[53,46188,46164],{"class":82},[53,46190,31285],{"class":389},[53,46192,6452],{"class":63},[53,46194,46153],{"class":82},[53,46196,2385],{"class":63},[53,46198,46176],{"class":82},[53,46200,390],{"class":389},[53,46202,2385],{"class":63},[53,46204,46205],{"class":82},"$OPENCMS_BASE",[53,46207,46208],{"class":63},"\"/../../..\n",[53,46210,46211],{"class":55,"line":515},[53,46212,4373],{"class":389},[53,46214,46215,46218,46220],{"class":55,"line":521},[53,46216,46217],{"class":82},"TOMCAT_CLASSPATH",[53,46219,390],{"class":389},[53,46221,46222],{"class":63},"\"\"\n",[53,46224,46225],{"class":55,"line":527},[53,46226,46227],{"class":3698},"# Support for tomcat 5\n",[53,46229,46230,46232,46235,46237,46240,46243,46245],{"class":55,"line":533},[53,46231,32362],{"class":389},[53,46233,46234],{"class":82}," JAR ",[53,46236,32368],{"class":389},[53,46238,46239],{"class":82}," ${TOMCAT_HOME}",[53,46241,46242],{"class":63},"/common/lib/*.jar",[53,46244,31207],{"class":82},[53,46246,31210],{"class":389},[53,46248,46249,46252,46254,46256,46258,46261,46264],{"class":55,"line":539},[53,46250,46251],{"class":82}," TOMCAT_CLASSPATH",[53,46253,390],{"class":389},[53,46255,39265],{"class":63},[53,46257,46217],{"class":82},[53,46259,46260],{"class":63},"}:${",[53,46262,46263],{"class":82},"JAR",[53,46265,46266],{"class":63},"}\"\n",[53,46268,46269],{"class":55,"line":545},[53,46270,31255],{"class":389},[53,46272,46273,46275,46277,46279,46281,46284,46286],{"class":55,"line":2414},[53,46274,32362],{"class":389},[53,46276,46234],{"class":82},[53,46278,32368],{"class":389},[53,46280,46239],{"class":82},[53,46282,46283],{"class":63},"/shared/lib/*.jar",[53,46285,31207],{"class":82},[53,46287,31210],{"class":389},[53,46289,46290,46292,46294,46296,46298,46300,46302],{"class":55,"line":2426},[53,46291,46251],{"class":82},[53,46293,390],{"class":389},[53,46295,39265],{"class":63},[53,46297,46217],{"class":82},[53,46299,46260],{"class":63},[53,46301,46263],{"class":82},[53,46303,46266],{"class":63},[53,46305,46306],{"class":55,"line":2438},[53,46307,31255],{"class":389},[53,46309,46310],{"class":55,"line":2451},[53,46311,46312],{"class":3698},"# Support for tomcat 
6\n",[53,46314,46315,46317,46319,46321,46323,46326,46328],{"class":55,"line":2459},[53,46316,32362],{"class":389},[53,46318,46234],{"class":82},[53,46320,32368],{"class":389},[53,46322,46239],{"class":82},[53,46324,46325],{"class":63},"/lib/*.jar",[53,46327,31207],{"class":82},[53,46329,31210],{"class":389},[53,46331,46332,46334,46336,46338,46340,46342,46344],{"class":55,"line":2470},[53,46333,46251],{"class":82},[53,46335,390],{"class":389},[53,46337,39265],{"class":63},[53,46339,46217],{"class":82},[53,46341,46260],{"class":63},[53,46343,46263],{"class":82},[53,46345,46266],{"class":63},[53,46347,46348],{"class":55,"line":2476},[53,46349,31255],{"class":389},[53,46351,46352,46355,46357],{"class":55,"line":2484},[53,46353,46354],{"class":82},"OPENCMS_CLASSPATH",[53,46356,390],{"class":389},[53,46358,46222],{"class":63},[53,46360,46361,46363,46365,46367,46370,46372,46374],{"class":55,"line":2490},[53,46362,32362],{"class":389},[53,46364,46234],{"class":82},[53,46366,32368],{"class":389},[53,46368,46369],{"class":82}," ${OPENCMS_BASE}",[53,46371,46325],{"class":63},[53,46373,31207],{"class":82},[53,46375,31210],{"class":389},[53,46377,46378,46381,46383,46385,46387,46389,46391],{"class":55,"line":2495},[53,46379,46380],{"class":82}," OPENCMS_CLASSPATH",[53,46382,390],{"class":389},[53,46384,39265],{"class":63},[53,46386,46354],{"class":82},[53,46388,46260],{"class":63},[53,46390,46263],{"class":82},[53,46392,46266],{"class":63},[53,46394,46395],{"class":55,"line":2507},[53,46396,31255],{"class":389},[53,46398,46399,46401,46404,46407,46409,46411,46413,46416,46419,46422,46424,46426,46428,46430,46433],{"class":55,"line":2528},[53,46400,21472],{"class":59},[53,46402,46403],{"class":89}," -classpath",[53,46405,46406],{"class":63}," \"${",[53,46408,46354],{"class":82},[53,46410,46260],{"class":63},[53,46412,46217],{"class":82},[53,46414,46415],{"class":63},"}:classes\"",[53,46417,46418],{"class":63}," evalscripts.groovy",[53,46420,46421],{"class":89}," -base=",[53,46423,39265],{"class":63},[53,46425,46125],{"class":82},[53,46427,6291],{"class":63},[53,46429,6452],{"class":63},[53,46431,46432],{"class":89},"$@",[53,46434,31375],{"class":63},[18,46436,46437],{},"As you can see, a groovy script named “evalscripts.groovy” is called and all options are passed to it. 
The script:",[43,46439,46442],{"className":46440,"code":46441,"language":21472,"meta":48,"style":48},"language-groovy shiki shiki-themes github-light github-dark","\nimport org.opencms.main.CmsShell;\nimport org.opencms.file.CmsObject;\nbase = args[0].substring(CmsShell.SHELL_PARAM_BASE.length());\nshell = new CmsShell(base, null, null, \">\", null) {\n CmsObject getCmsObject() {\n return m_cms;\n }\n}\nuser = \"Admin\";\npass = \"admin\";\ncms = shell.getCmsObject();\ncms.loginUser(user, pass);\nbinding1 = new Binding();\nbinding1.setProperty('cmsObject' , cms);\ngroovyShell = new GroovyShell(binding1);\nfor (int i = 1; i \u003C args.length; i++) {\n groovyShell.evaluate(new File(args[i]))\n}\nshell.exit();\n\n",[50,46443,46444,46448,46453,46458,46463,46468,46473,46478,46482,46486,46491,46496,46501,46506,46511,46516,46521,46526,46531,46535],{"__ignoreMap":48},[53,46445,46446],{"class":55,"line":56},[53,46447,500],{"emptyLinePlaceholder":499},[53,46449,46450],{"class":55,"line":86},[53,46451,46452],{},"import org.opencms.main.CmsShell;\n",[53,46454,46455],{"class":55,"line":126},[53,46456,46457],{},"import org.opencms.file.CmsObject;\n",[53,46459,46460],{"class":55,"line":163},[53,46461,46462],{},"base = args[0].substring(CmsShell.SHELL_PARAM_BASE.length());\n",[53,46464,46465],{"class":55,"line":186},[53,46466,46467],{},"shell = new CmsShell(base, null, null, \">\", null) {\n",[53,46469,46470],{"class":55,"line":221},[53,46471,46472],{}," CmsObject getCmsObject() {\n",[53,46474,46475],{"class":55,"line":242},[53,46476,46477],{}," return m_cms;\n",[53,46479,46480],{"class":55,"line":273},[53,46481,860],{},[53,46483,46484],{"class":55,"line":279},[53,46485,282],{},[53,46487,46488],{"class":55,"line":496},[53,46489,46490],{},"user = \"Admin\";\n",[53,46492,46493],{"class":55,"line":503},[53,46494,46495],{},"pass = \"admin\";\n",[53,46497,46498],{"class":55,"line":509},[53,46499,46500],{},"cms = shell.getCmsObject();\n",[53,46502,46503],{"class":55,"line":515},[53,46504,46505],{},"cms.loginUser(user, pass);\n",[53,46507,46508],{"class":55,"line":521},[53,46509,46510],{},"binding1 = new Binding();\n",[53,46512,46513],{"class":55,"line":527},[53,46514,46515],{},"binding1.setProperty('cmsObject' , cms);\n",[53,46517,46518],{"class":55,"line":533},[53,46519,46520],{},"groovyShell = new GroovyShell(binding1);\n",[53,46522,46523],{"class":55,"line":539},[53,46524,46525],{},"for (int i = 1; i \u003C args.length; i++) {\n",[53,46527,46528],{"class":55,"line":545},[53,46529,46530],{}," groovyShell.evaluate(new File(args[i]))\n",[53,46532,46533],{"class":55,"line":2414},[53,46534,282],{},[53,46536,46537],{"class":55,"line":2426},[53,46538,46539],{},"shell.exit();\n",[18,46541,46542],{},"We start by creating an instance of the CmsShell class and make the underlying CmsObject accessible. We login using the\nAdmin user and bind the instance so we can use it in the scripts that are doing the real work. This is where you come\ninto play: You can write any groovy script that uses this CmsObject and do whatever you want. Some ideas? 
Why not create\nsome users:",[43,46544,46546],{"className":46440,"code":46545,"language":21472,"meta":48,"style":48},"\n10.times {\n cmsObject.createUser(\"User$it\", \"Pass$it\", \"\", new HashMap());\n}\n\n",[50,46547,46548,46552,46557,46562],{"__ignoreMap":48},[53,46549,46550],{"class":55,"line":56},[53,46551,500],{"emptyLinePlaceholder":499},[53,46553,46554],{"class":55,"line":86},[53,46555,46556],{},"10.times {\n",[53,46558,46559],{"class":55,"line":126},[53,46560,46561],{}," cmsObject.createUser(\"User$it\", \"Pass$it\", \"\", new HashMap());\n",[53,46563,46564],{"class":55,"line":163},[53,46565,282],{},[18,46567,46568],{},"Or list all users:",[43,46570,46572],{"className":46440,"code":46571,"language":21472,"meta":48,"style":48},"\ncmsObject.getUsers().each {\n println it.name\n}\n\n",[50,46573,46574,46578,46583,46588],{"__ignoreMap":48},[53,46575,46576],{"class":55,"line":56},[53,46577,500],{"emptyLinePlaceholder":499},[53,46579,46580],{"class":55,"line":86},[53,46581,46582],{},"cmsObject.getUsers().each {\n",[53,46584,46585],{"class":55,"line":126},[53,46586,46587],{}," println it.name\n",[53,46589,46590],{"class":55,"line":163},[53,46591,282],{},[18,46593,46594],{},"How do you use it? You pass the path to the scripts that contain your logic to the shell script and it will execute them\nautomatically. Suppose the shell script is named groovyshell.sh and the groovy files are named createUsers.groovy and\nlistUsers.groovy. Execute them like this:",[18,46596,46597],{},[50,46598,46599],{},"./groovyshell.sh createUsers.groovy listUsers.groovy",[18,46601,46602],{},"You will see the familiar OpenCms startup sequence followed by the output of the second script:",[43,46604,46606],{"className":30754,"code":46605,"language":30756,"meta":48,"style":48},"\n...\nWelcome to the OpenCms shell!\nThis is OpenCms 7.5.x.\nCopyright (c) 2010 Alkacon Software GmbH\nOpenCms comes with ABSOLUTELY NO WARRANTY\nThis is free software, and you are welcome to\nredistribute it under certain conditions.\nPlease see the GNU Lesser General Public Licence for\nfurther details.\nhelp Shows this text.\nhelp * Shows the signatures of all available methods.\nhelp {string} Shows the signatures of all methods containing this string.\nexit or quit Leaves this OpenCms Shell.\nAdmin\nExport\nGuest\nUser0\nUser1\nUser2\nUser3\nUser4\nUser5\nUser6\nUser7\nUser8\nUser9\nGoodbye!\n...\n\n",[50,46607,46608,46612,46616,46631,46643,46651,46669,46696,46713,46741,46749,46762,46787,46816,46837,46842,46847,46852,46857,46862,46867,46872,46877,46882,46887,46892,46897,46902,46907],{"__ignoreMap":48},[53,46609,46610],{"class":55,"line":56},[53,46611,500],{"emptyLinePlaceholder":499},[53,46613,46614],{"class":55,"line":86},[53,46615,6348],{"class":89},[53,46617,46618,46621,46623,46625,46628],{"class":55,"line":126},[53,46619,46620],{"class":59},"Welcome",[53,46622,378],{"class":63},[53,46624,112],{"class":63},[53,46626,46627],{"class":63}," OpenCms",[53,46629,46630],{"class":63}," shell!\n",[53,46632,46633,46636,46638,46640],{"class":55,"line":163},[53,46634,46635],{"class":59},"This",[53,46637,7198],{"class":63},[53,46639,46627],{"class":63},[53,46641,46642],{"class":63}," 7.5.x.\n",[53,46644,46645,46648],{"class":55,"line":186},[53,46646,46647],{"class":59},"Copyright",[53,46649,46650],{"class":82}," (c) 2010 Alkacon Software GmbH\n",[53,46652,46653,46655,46658,46660,46663,46666],{"class":55,"line":221},[53,46654,41231],{"class":59},[53,46656,46657],{"class":63}," comes",[53,46659,1178],{"class":63},[53,46661,46662],{"class":63}," 
How do you use it? You pass the path to the scripts that contain your logic to the shell script and it will execute them automatically. Suppose the shell script is named groovyshell.sh and the Groovy files are named createUsers.groovy and listUsers.groovy. Execute them like this:

`./groovyshell.sh createUsers.groovy listUsers.groovy`

You will see the familiar OpenCms startup sequence followed by the output of the second script:

```
...
Welcome to the OpenCms shell!
This is OpenCms 7.5.x.
Copyright (c) 2010 Alkacon Software GmbH
OpenCms comes with ABSOLUTELY NO WARRANTY
This is free software, and you are welcome to
redistribute it under certain conditions.
Please see the GNU Lesser General Public Licence for
further details.
help Shows this text.
help * Shows the signatures of all available methods.
help {string} Shows the signatures of all methods containing this string.
exit or quit Leaves this OpenCms Shell.
Admin
Export
Guest
User0
User1
User2
User3
User4
User5
User6
User7
User8
User9
Goodbye!
...
```

I think this will be useful for us in the future, maybe also for you?

## Integration tests for your Solr config

Solr is a search server that bundles a lot of useful Lucene modules and provides an HTTP interface for querying and updating the data. The index and most of the query mechanisms are configured using XML documents, so client applications normally don't need to be changed when adjusting the server configuration. As the server configuration heavily influences the quality of your users' search experience, it's a good idea to implement some integration tests that validate your functionality.

Solr ships with a useful abstract [JUnit](http://junit.org/) test case that can be used as a basis for your integration tests. I will demonstrate how to fire up a simple test using Maven.

The most important part is the dependencies section of the pom:

```xml
<dependency>
    <groupId>junit</groupId>
    <artifactId>junit</artifactId>
    <version>4.7</version>
    <type>jar</type>
    <scope>test</scope>
</dependency>
<!-- dependencies needed for Solr integration test -->
<dependency>
    <groupId>org.apache.solr</groupId>
    <artifactId>solr-core</artifactId>
    <version>1.4.1</version>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-simple</artifactId>
    <version>1.6.1</version>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>javax.servlet</groupId>
    <artifactId>servlet-api</artifactId>
    <version>2.5</version>
    <scope>test</scope>
</dependency>
```

We are including a recent JUnit version to enjoy some of the annotation goodness. solr-core contains all of the server components as well as the test case; slf4j is used for logging. Of course you have to check whether any of the artifacts conflict with runtime or compile time dependencies.

To run a simple test case against the example index config shipped with Solr 1.4.1, copy or link the folder apache-solr-1.4.1/example/solr/ to your project's basedir.

An example test case that checks if a value is found for a valid search:

```java
import java.io.IOException;

import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.util.AbstractSolrTestCase;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.SolrParams;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertEquals;

public class SolrSearchConfigTest extends AbstractSolrTestCase {

    private SolrServer server;

    @Override
    public String getSchemaFile() {
        return "solr/conf/schema.xml";
    }

    @Override
    public String getSolrConfigFile() {
        return "solr/conf/solrconfig.xml";
    }

    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();
        server = new EmbeddedSolrServer(h.getCoreContainer(), h.getCore().getName());
    }

    @Test
    public void testThatNoResultsAreReturned() throws SolrServerException {
        SolrParams params = new SolrQuery("text that is not found");
        QueryResponse response = server.query(params);
        assertEquals(0L, response.getResults().getNumFound());
    }

    @Test
    public void testThatDocumentIsFound() throws SolrServerException, IOException {
        SolrInputDocument document = new SolrInputDocument();
        document.addField("id", "1");
        document.addField("name", "my name");
        server.add(document);
        server.commit();

        SolrParams params = new SolrQuery("name");
        QueryResponse response = server.query(params);
        assertEquals(1L, response.getResults().getNumFound());
        assertEquals("1", response.getResults().get(0).get("id"));
    }
}
```
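The same fixture can also be used to pin down schema-specific behaviour, such as the analysis applied to a field. The following is just a sketch of an additional test method for the SolrSearchConfigTest above; it assumes that the example schema analyzes the name field case-insensitively, so check your schema.xml before relying on it:

```java
@Test
public void testThatSearchIsCaseInsensitive() throws SolrServerException, IOException {
    SolrInputDocument document = new SolrInputDocument();
    document.addField("id", "2");
    document.addField("name", "Some Mixed Case Name");
    server.add(document);
    server.commit();

    // assumes the field type of "name" lower-cases tokens at index and query time
    QueryResponse response = server.query(new SolrQuery("mixed"));
    assertEquals(1L, response.getResults().getNumFound());
    assertEquals("2", response.getResults().get(0).get("id"));
}
```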
## Devoxx 2010 – Revisited using Parleys

This year in November three of my colleagues and I were visiting the best Java conference ever – [Devoxx](http://devoxx.com/display/Devoxx2K10/Home) in Antwerp (blogs here: [1](http://blog.synyx.de/2010/11/devoxx-2010-part-1/), [2](http://blog.synyx.de/2010/11/devoxx-university/), [3](http://blog.synyx.de/2010/11/devoxx-2010-part-2/)). Now, since more than a month has gone by, it's time to recall what about Devoxx is still present in my mind.

The first thing is that the conference was very, very well organized and the location was almost perfect.

The second thing that is still in my mind is the awful “coffee” they served. This stuff was not worthy of being called coffee, and for a Java conference that just seems wrong, guys ;). Please make the tickets more expensive next year and get some good coffee machines for the extra money you earn. The nerds will thank you.

But the most important thing I remember is the quality of talks and speakers. They helped me a lot and have already improved my work. I think Synyx invested the money for Devoxx just right, because all of us enjoyed the conference and also have a whole new set of ideas and tools ready for production now.

Unfortunately, as at every conference, I missed some of the great talks I was really interested in because I was busy seeing other talks in parallel. As I said, this happens at every conference. But Devoxx gets around that using [Parleys.com](http://parleys.com), where you can watch all talks of Devoxx for around $100 a year. Over the last days I've seen just about all of the ones I could not see because they were conflicting with other talks, and it is just great. It seems to be the perfect thing to do on the nights around Christmas if you have neither Internet nor a TV available.

Parleys is a fancy web-based application ([Adobe Air](http://get.adobe.com/air/)) where you can browse through all of the talks and watch them as you were used to on Devoxx's big cinema screens: they offer the video of the speaker next to the slides as well as the demo videos in a synchronous manner. You also get some nice options like enlarging one of the videos, rating them, and so on.

But the best feature is definitely the “download” feature, since it allows you to watch the talks offline at your grandmother's using the desktop client. By the way, on my Linux machines I could not install it using Firefox and http://parleys.com/desktop/, but had to wget the .air file found in the page's source code and then run the installer manually using a pre-installed Adobe Air.

So over the last days I've seen some great talks from Devoxx 2010 which I could not attend at the conference and which I definitely shouldn't have missed. Also, if you were not at Devoxx this is a perfect way to get some of the feeling and knowledge at a nice price. Parleys also helped me a lot over the last weeks at work, where we introduced Wicket as the frontend technology of a new application. I could hand my colleagues the three-hour University talk about Wicket as an introduction, which turned out to be great since they could start using it right away afterwards.

![Talk in Parleys](https://media.synyx.de/uploads//2010/12/devoxx_revisited.jpg)

The downside of Parleys in my opinion is its way too fancy Adobe Air style. While this might be great for designers and “regulars”, I'd prefer more usability and performance while browsing and searching the videos over nice animations and effects. The developers of Parleys should consider including a fast and non-fancy searchable and filterable list of all talks with options like *queue to download*, *mark as to be watched* and *watch*. I really hope to see these features announced when we come back to Devoxx 2011.

## Netbeans and OpenCms

[OpenCms](http://opencms.org) stores all its content like JSP templates, images and CSS files in a virtual file system, which makes it impossible to use normal development models. As we have been using it for several years now, we have always been trying to improve our development processes.

One of the results is a custom [Netbeans](http://netbeans.org) plugin for working with OpenCms. We have been using it successfully in several projects but didn't find the time to release it to the public until today. The plugin can now be downloaded on our open source project site.

It consists of a combination of a Netbeans plugin and an OpenCms module and is targeted at development environments. Files can be uploaded using the Netbeans context menu. This drastically improves the development time as changes can be applied and checked immediately.

![](https://media.synyx.de/uploads//2010/11/netbeans-dialogsmall.png)

Of course we are interested in whether this plugin is useful for anybody, so leave a comment if you like it. If you are interested in any enhancements or you have found a bug, just file a ticket in Redmine.

## Logging to multiple files using log4j

Sometimes even the simple things take quite some time. For a special application log I've been searching for a way to write log calls for one package to a separate file using Log4j. I've been searching the web and skimming through the log4j documentation but didn't really find anything. [This message](http://www.junlu.com/msg/93394.html), though only a question, contained the missing hint.

Suppose you have a log configuration that already has root loggers configured. To make only one package log to another file, but not to the root logger appenders, you add something like this:

```properties
log4j.logger.my.Logger=INFO,FILE
log4j.additivity.my.Logger=false
log4j.appender.FILE=org.apache.log4j.RollingFileAppender
log4j.appender.FILE.File=include-performance.log
log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
log4j.appender.FILE.layout.ConversionPattern=%d{DATE} %5p [%30.30C:%4L] %m%n
```

What does it do? In the first line you create a logger that logs anything at level INFO or above. In the same line, separated by a comma, you add the name of your appender. By setting additivity to false you prevent the logger from inheriting any appenders from its ancestors (including the root loggers).
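To illustrate the effect, here is a minimal sketch of a class that logs through the separate appender only; the logger name my.Logger and the file name are taken from the configuration above, everything else (class name and messages) is made up:

```java
import org.apache.log4j.Logger;

public class PerformanceLogDemo {

    // "my.Logger" is the logger configured above; child loggers such as
    // "my.Logger.Requests" inherit the FILE appender as well
    private static final Logger PERFORMANCE_LOG = Logger.getLogger("my.Logger");

    public static void main(String[] args) {
        // written to include-performance.log only, because additivity is set to false
        PERFORMANCE_LOG.info("request handled in 42 ms");

        // below the INFO threshold of this logger, so it is not written at all
        PERFORMANCE_LOG.debug("this message is dropped");
    }
}
```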
## Devoxx 2010 – Part 2

So this is the second part of my experiences with Devoxx 2010. I'll still stick to not writing about the talks in detail but telling my thoughts as a “first time visitor”.

Day three to five at Devoxx were “Conference” days, which have – in contrast to the University talks [Florian](http://blog.synyx.de/2010/11/devoxx-university/) and I mentioned earlier – much shorter talks. This had the trade-off [I mentioned before](http://blog.synyx.de/2010/11/devoxx-2010-part-1/): you have many switches of topics between the talks, which actually made it sometimes kind of hard to even remember in the evening what I saw in the morning. But what I have to admit is that even if there were more talks a day, their quality was excellent. There were so many brilliant speakers (Josh Bloch, Neal Ford and Matt Raible just to mention a very few of them) and almost all talks were both interesting and entertaining.

So we had a lot of fun listening, and I personally can say I am full (actually more than full) of new ideas and inspiration about what to do next and how to adopt or use the things that were talked about at Synyx.

Another thing that I noticed is that many of the ideas, tools, frameworks etc. that were shown at the different talks are already in production at Synyx, which confirms us and shows that we're on the right road. But of course there is still a lot of work to do, a lot of code to write and a lot of ways to improve ourselves. Well, this was one of the reasons why we've been visiting Devoxx anyway.

A downside of the Conference days was that there were about five times as many visitors as in the first two days, which made the rooms and the hall really crowded, and you often had to wait (getting in and out of rooms or going to the restrooms). During some really popular talks people (including us ;-)) had to sit on the stairs or on the floor to be able to listen to them.

![A lot of people](https://media.synyx.de/uploads//2010/11/devoxx-bloch-crowded.jpg)

I think what could be improved for the next time are the “flows” of people during the breaks. Maybe only allow people to leave the rooms at the lower exits and enter at the upper ones, or manage them in another smart way. And maybe they could have some overflow rooms that can be used dynamically if a room gets too crowded.

To sum things up, it was a great time we had at Devoxx 2010 and I'd love to come back again, since this conference is really the best – at least the best I've been to so far. Keep up the good work, we really enjoyed it!

## Devoxx University

As the university days at Devoxx are nearly finished I'd like to summarize some of the more interesting talks that happened during the first two days. Marc already wrote [some words on the conference itself](http://blog.synyx.de/2010/11/devoxx-2010-part-1/) so I will focus on the talks.

Monday started off with Neal Ford talking about Productive Programmers. The topics he covered are quite basic but nevertheless important to know for every developer. The first part showed how to optimize your daily work, circling around the terms automation, acceleration, canonicality and focus. Some useful tools and techniques were presented, e.g. an [Eclipse plugin for learning keyboard shortcuts](http://www.mousefeed.com/) that sounds really nice: every time you use the mouse to navigate or select an action, a message is displayed that tells you how to do the same thing just with the keyboard. I'd be glad to have a plugin like this for Netbeans! During the second part a lot of best practices for coding were covered, always good to keep those in mind. All in all a very inspiring talk and a good start to the week.

For the afternoon session I chose [MongoDB](http://www.mongodb.org/), the document oriented database. Being already familiar with the basic concepts from [FrOSCon](http://blog.synyx.de/2010/08/froscon-2010/) and several podcasts, I was especially interested in seeing it in action using Java. Two basic approaches were introduced: the raw Java driver developed by the MongoDB team lets you work on quite a low level, handling Maps of Strings and Objects. A more sophisticated approach is to use the community developed [Morphia](http://code.google.com/p/morphia/) driver, which allows you to annotate POJOs just like when using JPA. I wouldn't have expected to see such a nice abstraction for MongoDB yet, definitely something to keep an eye on. I am curious if [Spring Data](http://www.springsource.org/spring-data) can offer something similar in the near future.

On Tuesday I had to get up extra early: the Scala lab offered by Dick Wall of the [Javaposse](http://www.javaposse.com/) and Bill Venners was scheduled for the BOF rooms, which only fit around 50 people. Definitely the highlight of the conference so far, as with only a few attendees there was a lot of time for exercises and support by these two experts. I hope I can start using Scala at work soon; [ScalaTest](http://www.scalatest.org/) is supposed to be a good starting point for learning the language without having to integrate it into a production system.

Back to Java in the afternoon: Emmanuel Bernard demonstrated new features in Hibernate and JPA 2. Besides the typesafe Criteria API it was really nice to see the fluent APIs used for Hibernate Search. Seems like Hibernate Search can free you from a lot of programming work when integrating Lucene and Hibernate.

Looking forward to the rest of the conference, which will surely bring more interesting talks during the rest of the week!

## Devoxx 2010 – Part 1

Employees of Synyx have been going to [Devoxx](http://devoxx.com) in Antwerp for three years. I've never been here before, but my colleagues have always reported that they loved the conference. So this is the first time I could also make it, and I must say, after less than the first half: it rocks!

I am not gonna bore you with the details of the talks I visited because I leave this up to one of the other guys from Synyx that are here with me. 😉 Instead I'll try to explain what I liked most/not/whatever about it from a “Devoxx newbie” perspective.

The best thing to mention so far are the University talks. These extra-long talks take place (unfortunately) only within the first two days of Devoxx. The special thing about them is that they last 3 hours each. This lets the speakers dig much deeper into their topic than they could during a 1 or 1.5 hour talk. Additionally, most of the sessions I've seen so far have some bigger demos and live coding included. Another good thing is that since you can only visit two a day (one in the morning and one in the afternoon) you won't have to “switch tasks” so often, meaning you can keep concentrating on one topic for a longer period of time. I like this much better than other conferences where you'll have like 6 sessions in a row without anything in common (ok, you can follow the tracks that should have related topics, but who does that…).

By the way, if someone is interested in what University talks I've been visiting: [Hadoop Fundamentals](http://devoxx.com/display/Devoxx2K10/Hadoop+Fundamentals++HDFS%2C+MapReduce%2C+Pig%2C+and+Hive), [Spring Roo](http://devoxx.com/display/Devoxx2K10/Extreme+Productivity+with+Spring+Roo), [Wicket](http://devoxx.com/display/Devoxx2K10/Introducing+Wicket), [Hibernate / JPA2](http://devoxx.com/display/Devoxx2K10/What%27s+new+in+Hibernate++a+JPA+2+perspective) (click on the links for details and full names of the talks).

So I am looking forward to the next 3 days that are hopefully packed with new stuff to learn. Stay tuned for the second part and my résumé of Devoxx 2010.

## Maven 2: Inheritance before Interpolation

Some days ago I came across a problem with our beloved build tool Maven 2. Since this was the first real problem with the tool that I could not solve or find a good workaround for, I think it is worth a blog post.

Maven 2 relies on a project descriptor for each project to build, which is XML in a file called pom.xml at the root of a project. Within that file you define how your project is to be built, what dependencies it needs and much more.

It is a [commonly used pattern](http://www.sonatype.com/people/2010/08/how-to-migrate-from-ant-to-maven-project-structure/) to avoid a monolithic build and divide your stuff into several smaller projects as the project grows (e.g. having an api package, a core, a web client and so on). Since all of these projects actually belong together, you may want to avoid duplication of XML within the project descriptors. So Maven provides a powerful feature for this: pom **inheritance**. Each project may have a project as parent from whose pom information gets inherited. Using this you can define common stuff like repositories, reporting settings and project information in the “parent pom”.

So one of the entries we defined in the parent pom is the repository node, which tells Maven where to look for packages needed for the build (e.g. dependencies). In our case this points to a repository hosted by ourselves on an instance of [Nexus](http://nexus.sonatype.org/):

```xml
<repository>
    <id>internal.nexus.example.org</id>
    <name>Internal Repo</name>
    <url>${infrastructure.nexus.host}/content/groups/internal</url>
</repository>
```

As you can see, we defined the hostname of the repository as a property which Maven resolves during the evaluation of the project descriptor. This process is called *Interpolation*. At some other place (within a pom or within global configuration) you are able to set the property `infrastructure.nexus.host`. We did this since the hostnames are subject to change as soon as the project's infrastructure is moved to our customers.

Ok, the first (solvable) problem that occurred here is some kind of chicken-and-egg problem: the parent itself is hosted in the internal repository, which makes it impossible for Maven to download it before it knows from which repository.

So Maven has to resolve inheritance first, which means the repository where the parent of a project can be downloaded must either be defined in the child's pom, or the parent must be available in the global [maven central repository](https://web.archive.org/web/20220313074201/https://repo1.maven.org/maven2/) (which was not an option for us). I know there is another way, namely defining the repository in the global settings.xml file, which I wanted to avoid because it brings along other, in my opinion even bigger, problems.

So we decided to add the repository node (as seen above) to each project's pom.xml so that Maven can download the parent project from the internal Nexus first. Since we still needed to be able to override the hostname, the property still needed to be defined somewhere else than in the parent (e.g. on the command line or using settings.xml).

But as soon as you try to build your project, Maven fails:

```
$ mvn validate -Dinfrastructure.nexus.host=http://example.org
[INFO] Scanning for projects...
Downloading: ${infrastructure.nexus.host}/content/groups/internal/org/example/parent/1.0.0-SNAPSHOT/parent-1.0.0-SNAPSHOT.pom
[INFO] Unable to find resource 'org.example:parent:pom:1.0.0-SNAPSHOT' in repository internal.nexus.example.org (${infrastructure.nexus.host}/content/groups/internal)
[INFO] ------------------------------------------------------------------------
[ERROR] FATAL ERROR
...
```

As you can see, in the URL where Maven tried to download the parent project it did not resolve the property.

This is because Maven FIRST does inheritance and THEN interpolation. It's clear that it cannot do it the other way round (since the parent project may define properties that are needed for interpolation). But I cannot understand why Maven does not do FIRST interpolation, THEN inheritance and THEN interpolate again. Does anyone know?

To “solve” our problem we currently do not use interpolation within the child's repository configuration, which sucks because it means changing a lot of pom.xml files later (and even released ones…).

## Simple Shell-Script to use dict.leo.org in your shell

Just create a new file like “vim leo”.

Insert the following script code:

```sh
#!/bin/sh
t(){
while [ -n "$1" ]; do
T=/tmp/$$.html
lynx -source "http://dict.leo.org/?search=$1"|
grep results >$T
w3m -dump $T
rm $T
shift
done
}
[ -n "$1" ]&&\
t "$@"||\
while read -ep dict2> W; do
t $W|more
done
```

You need lynx and w3m.

Make the file executable (chmod +x leo).

Enjoy 😉

e.g. ./leo Übersetzung

I found this script somewhere on the internet but I don't know where, so I cannot reference the original source. I apologize for this.

## Context reload with Tomcat

Ever wondered why Tomcat reloads the context when editing web.xml?

This is a default configuration that can also be adjusted to your needs. The file conf/context.xml is the default context configuration that is used for all webapps. In this file you can find the line

`<WatchedResource>WEB-INF/web.xml</WatchedResource>`

which triggers the reload for any web.xml.

You can either add more resources here or, preferably, add your own context configuration with your resources.

Find out more about context configuration in the [Tomcat documentation](http://tomcat.apache.org/tomcat-6.0-doc/config/context.html).

## Console logging with OpenCms

*We are currently in the process of cleaning up our internal wiki. A lot of information is quite outdated, but it also contains some useful snippets that we would like to share with the rest of the world.*

The log4j ConsoleAppender is already configured in the OpenCms log4j.properties but not enabled by default. To enable it, change the configuration

```properties
log4j.rootLogger=\
 ERROR,\
 OC
```

to

```properties
log4j.rootLogger=\
 ERROR,\
 OC,\
 CONSOLE
```

This is useful when starting OpenCms from within an IDE like Netbeans or Eclipse, which display the Console window by default.

## Mobile Solutions – Summary

It has been a while since my [last update](http://blog.synyx.de/2010/07/mobile-solutions-summary-2/) on our efforts over at the [mobile solutions blog](http://mobile.synyx.de/).

The most important announcement was without any doubt the imminent release of our new side project: [“I think I spider”](http://mobile.synyx.de/2010/07/i-think-i-spider/).

> The “I think I spider” app idea was born when we discovered the corresponding web site. We had so much fun with it that we decided to bring all the joy to the major mobile platforms.

Although we haven't revealed any specific features yet, reading our [vast number of blog posts](http://mobile.synyx.de/category/our-apps/) on “I think I spider”, you've probably got an idea of what the app is going to bring to you.

A very interesting topic is [cross-device mobile development](http://dlinsin.blogspot.com/2010/03/is-future-of-mobile-apps-web.html), which might be where the future lies, especially with Apple's revised [iOS Developer Program License Agreements](http://daringfireball.net/2010/09/app_store_guidelines). Our very own [Florian](http://mobile.synyx.de/authors/?uid=9) wrote an [extensive two-part piece](http://mobile.synyx.de/2010/09/on-cross-device-mobile-development-part-2/) on the state of cross-device mobile development, which is definitely worth a read.

## Mobile Solutions – Summary

There's a lot going on over at [mobile.synyx.de](http://mobile.synyx.de), so in case you are not subscribed to our feed, which I hope you are, you can grab it [here](http://mobile.synyx.de/feed/). In order to convince you to hook your favorite reader up to our mobile blog, I'll highlight a couple of blog posts for you.

[Tobias'](http://mobile.synyx.de/authors/?uid=3) post [“Android and self-signed ssl certificates”](http://mobile.synyx.de/2010/06/android-and-self-signed-ssl-certificates/) gained a lot of attention over the past couple of weeks. He basically brings you up to speed on how to tweak Android's version of [Apache Commons Http](http://hc.apache.org/httpcomponents-client/httpclient/) to work with your own certificate:

> Dealing with self-signed ssl certificates is a real pain, because it's not that simple to add them to your app and let Android accept them.
>
> But fortunately, there's a workaround that uses your own SSLSocketFactory and your own TrustManager. With this, only the site you added can be called, so there's no security issue.

Another blog post I'd like to point out is by yours truly, on [“UI Prototyping iPhone Apps”](http://mobile.synyx.de/2010/06/ui-prototyping-iphone-apps/).
It covers a very simple\nconcept and provides you with a ",[585,48313,48316],{"href":48314,"rel":48315},"http://github.com/dlinsin/district9/tree/master/UIPrototyping/",[589],"framework"," to employ it\nin your App development:",[11259,48319,48320],{},[18,48321,48322],{},"I watched a whole bunch of sessions from 2009. Among others a session on “Prototyping iPhone User Interfaces” by Bret\nVictor… In his session, Bret shows how to prototype an interface only by using with screenshots! … It inspired me to use\nhis framework and the whole process for our own development… Unfortunately, the code for the session isn’t available …\nAfter some digging, I found Michael Fey’s blog, who was able to successfully reverse engineer the missing parts, which\nwere not shown in the presentation.",[18,48324,48325,48326,48330,48331,48336],{},"I hope by this time you have already subscribed to our ",[585,48327,48329],{"href":48267,"rel":48328},[589],"mobile blog"," and discovered a couple\nof ",[585,48332,48335],{"href":48333,"rel":48334},"http://mobile.synyx.de/2010/07/split-nsstring-by-characters/",[589],"interesting posts",", that our team put together over\npast couple of months.",{"title":48,"searchDepth":86,"depth":86,"links":48338},[],[613],"2010-07-16T07:28:03","There’s a lot going on over at the mobile solutions blog, so in case you are not subscribed to\\nour feed, which I hope you are, you can grab it here. In order to convince you to hook\\nyour favorite reader up to our mobile blog, I’ll highlight a couple of blog posts for you.","https://synyx.de/blog/mobile-solutions-summary-4/",{},"/blog/mobile-solutions-summary-4",{"title":48168,"description":48346},"There’s a lot going on over at the mobile solutions blog, so in case you are not subscribed to\nour feed, which I hope you are, you can grab it here. In order to convince you to hook\nyour favorite reader up to our mobile blog, I’ll highlight a couple of blog posts for you.","mobile-solutions-summary-4","blog/mobile-solutions-summary-4",[5844,48251,48252,20508],"There’s a lot going on over at the mobile solutions blog, so in case you are not subscribed to our feed, which I hope you are, you can grab it…","6e3hlP9lM0lmBm1BXb-OBwSJM2DFtSNp5TIpHdJ1OXE",{"id":48353,"title":48354,"author":48355,"body":48356,"category":48440,"date":48441,"description":48442,"extension":617,"link":48443,"meta":48444,"navigation":499,"path":48445,"seo":48446,"slug":48360,"stem":48448,"tags":48449,"teaser":48451,"__hash__":48452},"blog/blog/creating-a-mysql-dump-for-jdbc.md","Creating a MySQL dump for JDBC",[42564],{"type":11,"value":48357,"toc":48438},[48358,48361,48370,48379,48382,48391,48419,48423,48426,48435],[14,48359,48354],{"id":48360},"creating-a-mysql-dump-for-jdbc",[18,48362,48363,48364,48369],{},"I have just been fighting with\nthe ",[585,48365,48368],{"href":48366,"rel":48367},"https://web.archive.org/web/20150526005826/http://mojo.codehaus.org:80/sql-maven-plugin/",[589],"sql-maven-plugin"," while\ntrying to import an OpenCms MySQL dump automatically. 
The module wasn’t really verbose with an error message:",[18,48371,48372,48375,48376],{},[50,48373,48374],{},"[ERROR] Failed to execute: INSERT INTO ","CMS_CONTENTS",[50,48377,48378],{}," VALUES",[18,48380,48381],{},"followed by printing the rest of the dump to the standard output.",[18,48383,48384,48385,48390],{},"Fortunately my colleague ",[585,48386,48389],{"href":48387,"rel":48388},"http://blog.synyx.de/autoren/?uid=16",[589],"Marc"," pointed me to the right direction: The dump\ncontains some statements that can’t be executed by JDBC. To create a correct dump for usage with pure JDBC or with the\nsql-maven-plugin the following command has to be applied:",[43,48392,48394],{"className":30754,"code":48393,"language":30756,"meta":48,"style":48},"mysqldump --hex-blob -u root -p opencms752 > install/opencms752.sql\n\n",[50,48395,48396],{"__ignoreMap":48},[53,48397,48398,48400,48403,48406,48408,48411,48414,48416],{"class":55,"line":56},[53,48399,30736],{"class":59},[53,48401,48402],{"class":89}," --hex-blob",[53,48404,48405],{"class":89}," -u",[53,48407,15048],{"class":63},[53,48409,48410],{"class":89}," -p",[53,48412,48413],{"class":63}," opencms752",[53,48415,32413],{"class":389},[53,48417,48418],{"class":63}," install/opencms752.sql\n",[18,48420,48421],{},[27,48422,36478],{},[18,48424,48425],{},"To prevent that some characters are encoded incorrectly, add the following to the configuration options:",[43,48427,48429],{"className":3792,"code":48428,"language":3794,"meta":48,"style":48},"\u003CescapeProcessing>false\u003C/escapeProcessing>\n",[50,48430,48431],{"__ignoreMap":48},[53,48432,48433],{"class":55,"line":56},[53,48434,48428],{},[607,48436,48437],{},"html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}html pre.shiki code .szBVR, html code.shiki .szBVR{--shiki-default:#D73A49;--shiki-dark:#F97583}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}",{"title":48,"searchDepth":86,"depth":86,"links":48439},[],[613],"2010-07-09T16:38:56","I have just been fighting with\\nthe sql-maven-plugin while\\ntrying to import an OpenCms MySQL dump automatically. The module wasn’t really verbose with an error message:","https://synyx.de/blog/creating-a-mysql-dump-for-jdbc/",{},"/blog/creating-a-mysql-dump-for-jdbc",{"title":48354,"description":48447},"I have just been fighting with\nthe sql-maven-plugin while\ntrying to import an OpenCms MySQL dump automatically. 
The module wasn’t really verbose with an error message:","blog/creating-a-mysql-dump-for-jdbc",[43020,10891,48450,40376],"mysql","I have just been fighting with the sql-maven-plugin while trying to import an OpenCms MySQL dump automatically. The module wasn’t really verbose with an error message: [ERROR] Failed to execute:…","utIfI3bzTXA2v77k7OBCmKNqGmfWzL5W5-UgX284X1A",{"id":48454,"title":48455,"author":48456,"body":48457,"category":48544,"date":48545,"description":48546,"extension":617,"link":48547,"meta":48548,"navigation":499,"path":48549,"seo":48550,"slug":48461,"stem":48552,"tags":48553,"teaser":48554,"__hash__":48555},"blog/blog/servlet-container-options-for-maven.md","Servlet container options for Maven",[42564],{"type":11,"value":48458,"toc":48542},[48459,48462,48475,48482,48490,48497,48504,48507,48534,48537,48540],[14,48460,48455],{"id":48461},"servlet-container-options-for-maven",[18,48463,48464,48465,48468,48469,48474],{},"When developing web apps with ",[585,48466,37782],{"href":24570,"rel":48467},[589]," the de facto standard for running the app is to use the\nexcellent ",[585,48470,48473],{"href":48471,"rel":48472},"https://web.archive.org/web/20150520205353/https://docs.codehaus.org/display/JETTY/Maven+Jetty+Plugin",[589],"Maven Jetty Plugin","\nwhich runs the project in an embedded Jetty server. When configured, it can either run the project from the war file\ndirectly via mvn jetty:run or in exploded mode where the war is unpacked before being run (mvn jetty:run-exploded).\nThis noticably speeds up development as there is no need to manually deploy the artifact to a server.",[18,48476,48477,48478,48481],{},"But if the production system does not run on Jetty but on ",[585,48479,30689],{"href":30687,"rel":48480},[589]," you might run into some\nproblems:",[577,48483,48484,48487],{},[580,48485,48486],{},"Some redirects from AJAX calls do work on Jetty but do not work on Tomcat",[580,48488,48489],{},"When submitting some forms on Jetty the parameters get lost",[18,48491,48492,48493,48496],{},"The latter can be noticed when running ",[585,48494,41231],{"href":47468,"rel":48495},[589]," on Jetty: Saving from the editor causes an error\nbecause the parameters are not available to OpenCms.",[18,48498,48499,48500,48503],{},"Fortunately there is a viable alternative:\nThe ",[585,48501,42587],{"href":42585,"rel":48502},[589],".\nBesides providing several options to deploy artifacts to an external server it can also be run in an embedded mode.",[18,48505,48506],{},"This is how the plugin is configured:",[43,48508,48510],{"className":3792,"code":48509,"language":3794,"meta":48,"style":48},"\n\u003Cplugin>\n \u003CgroupId>org.codehaus.mojo\u003C/groupId>\n \u003CartifactId>tomcat-maven-plugin\u003C/artifactId>\n\u003C/plugin>\n\n",[50,48511,48512,48516,48520,48525,48530],{"__ignoreMap":48},[53,48513,48514],{"class":55,"line":56},[53,48515,500],{"emptyLinePlaceholder":499},[53,48517,48518],{"class":55,"line":86},[53,48519,22511],{},[53,48521,48522],{"class":55,"line":126},[53,48523,48524],{}," \u003CgroupId>org.codehaus.mojo\u003C/groupId>\n",[53,48526,48527],{"class":55,"line":163},[53,48528,48529],{}," \u003CartifactId>tomcat-maven-plugin\u003C/artifactId>\n",[53,48531,48532],{"class":55,"line":186},[53,48533,22591],{},[18,48535,48536],{},"It can be run using mvn tomcat:run and mvn tomcat:run-war for running in unpacked war mode.",[18,48538,48539],{},"Using this plugin you can be sure that the features your servlet container provides in production are the same 
during\ndevelopment.",[607,48541,989],{},{"title":48,"searchDepth":86,"depth":86,"links":48543},[],[613],"2010-07-09T08:48:36","When developing web apps with Maven the de facto standard for running the app is to use the\\nexcellent Maven Jetty Plugin\\nwhich runs the project in an embedded Jetty server. When configured, it can either run the project from the war file\\ndirectly via mvn jetty:run or in exploded mode where the war is unpacked before being run (mvn jetty:run-exploded).\\nThis noticably speeds up development as there is no need to manually deploy the artifact to a server.","https://synyx.de/blog/servlet-container-options-for-maven/",{},"/blog/servlet-container-options-for-maven",{"title":48455,"description":48551},"When developing web apps with Maven the de facto standard for running the app is to use the\nexcellent Maven Jetty Plugin\nwhich runs the project in an embedded Jetty server. When configured, it can either run the project from the war file\ndirectly via mvn jetty:run or in exploded mode where the war is unpacked before being run (mvn jetty:run-exploded).\nThis noticably speeds up development as there is no need to manually deploy the artifact to a server.","blog/servlet-container-options-for-maven",[7611,290,22972,10891,27672],"When developing web apps with Maven the de facto standard for running the app is to use the excellent Maven Jetty Plugin which runs the project in an embedded Jetty…","fRYI1HRQAdsyYeA7rnRcKDEpC-Azc63aZtMg04FdDvU",{"id":48557,"title":48168,"author":48558,"body":48559,"category":48670,"date":48671,"description":48672,"extension":617,"link":48673,"meta":48674,"navigation":499,"path":48675,"seo":48676,"slug":48678,"stem":48679,"tags":48680,"teaser":48681,"__hash__":48682},"blog/blog/mobile-solutions-summary-3.md",[48170],{"type":11,"value":48560,"toc":48668},[48561,48563,48583,48597,48635,48653,48660],[14,48562,48168],{"id":48175},[18,48564,48565,48566,48571,48572,48577,48578,48582],{},"In my ",[585,48567,48570],{"href":48568,"rel":48569},"http://blog.synyx.de/2010/05/mobile-solutions-%E2%80%93-summary/",[589],"last summary"," I forgot to mention, that I will be\nat ",[585,48573,48576],{"href":48574,"rel":48575},"http://developer.apple.com/wwdc",[589],"WWDC"," this year. Now WWDC is over, I just got back and if you want to know more,\ncheckout my blog post ",[585,48579,10819],{"href":48580,"rel":48581},"http://dlinsin.blogspot.com/2010/06/wwdc10.html",[589],". While I was gone, our team was busy\npublishing all sorts of good stuff!",[18,48584,48585,48586,48591,48592,48596],{},"Let’s start with the release of Android’s FroYo last\nmonth! 
",[585,48587,48590],{"href":48588,"rel":48589},"http://mobile.synyx.de/2010/05/in-my-humble-opinion-froyo-rocks/",[589],"Achim"," wrote\na ",[585,48593,48595],{"href":48588,"rel":48594},[589],"nice blog post"," highlighting the noteworthy\nfeatures:",[11259,48598,48599,48602,48605,48613,48616,48624,48627],{},[18,48600,48601],{},"*FroYo is like each previous version a mixture between API Changes, new Userfeatures and some new cool Apps….",[18,48603,48604],{},"New API-Features:*",[577,48606,48607,48610],{},[580,48608,48609],{},"Data Backup API",[580,48611,48612],{},"Possibility to save passwords secure",[18,48614,48615],{},"New User Features:",[577,48617,48618,48621],{},[580,48619,48620],{},"updated Exchange Features",[580,48622,48623],{},"Remote Wipe",[18,48625,48626],{},"New Apps:",[577,48628,48629,48632],{},[580,48630,48631],{},"Camera and Camcorder updated (possible to enable manually the leds for usage within camcorder)",[580,48633,48634],{},"Android Tethering and Usage as Hotspot",[18,48636,48637,48641,48642,48647,48648,4101],{},[585,48638,47589],{"href":48639,"rel":48640},"http://mobile.synyx.de/authors/?uid=6",[589],", our ",[585,48643,48646],{"href":48644,"rel":48645},"http://mobile.synyx.de/tag/maemo-5/",[589],"Maemo"," guy, followed our\nGoogle Maps theme and published the first part of a nice introduction\nof ",[585,48649,48652],{"href":48650,"rel":48651},"http://mobile.synyx.de/2010/06/google-maps-on-maemo-5-part-1/",[589],"Google Maps on Maemo 5",[11259,48654,48655],{},[18,48656,48657],{},[573,48658,48659],{},"The idea is quite simple. Webkit will render a webpage insider your app. That webpage consists of javascript methods\nwhich use the Google Maps-API. The javascript methods can be triggered from the app. The map class acts as proxy for\nthe communication between your app and the website. Quite simple hm?",[18,48661,48662,48663,48667],{},"This is only a small sneak peak of what’s going on over at the mobile solutions blog. I suggest\nyou ",[585,48664,48666],{"href":48187,"rel":48665},[589],"add it"," to your favorite feed reader and check it out regularly.",{"title":48,"searchDepth":86,"depth":86,"links":48669},[],[613],"2010-06-18T08:57:09","In my last summary I forgot to mention, that I will be\\nat WWDC this year. Now WWDC is over, I just got back and if you want to know more,\\ncheckout my blog post here. While I was gone, our team was busy\\npublishing all sorts of good stuff!","https://synyx.de/blog/mobile-solutions-summary-3/",{},"/blog/mobile-solutions-summary-3",{"title":48168,"description":48677},"In my last summary I forgot to mention, that I will be\nat WWDC this year. Now WWDC is over, I just got back and if you want to know more,\ncheckout my blog post here. While I was gone, our team was busy\npublishing all sorts of good stuff!","mobile-solutions-summary-3","blog/mobile-solutions-summary-3",[48251,48252,20508],"In my last summary I forgot to mention, that I will be at WWDC this year. 
Now WWDC is over, I just got back and if you want to know…","Vt3IIuwgnXxkVeplkiSoihd83Xam9gAc_bmG2yhTjts",{"id":48684,"title":48685,"author":48686,"body":48687,"category":48832,"date":48833,"description":48834,"extension":617,"link":48835,"meta":48836,"navigation":499,"path":48837,"seo":48838,"slug":48691,"stem":48839,"tags":48840,"teaser":48843,"__hash__":48844},"blog/blog/template-based-document-generation-using-odfdom.md","Template based document generation using ODFDOM",[42564],{"type":11,"value":48688,"toc":48826},[48689,48692,48695,48710,48714,48723,48734,48737,48741,48749,48752,48755,48758,48761,48764,48766,48769,48776,48781,48784,48787,48790,48793,48796,48799,48802,48805,48808,48811,48813,48815,48818,48821,48823],[14,48690,48685],{"id":48691},"template-based-document-generation-using-odfdom",[18,48693,48694],{},"Generating documents from data that is managed by a web application is a quite common need. Think about letters that are\ngenerated for a customer relationship management system or bills that are to be send for membership fees. For corporate\nidentity reasons you don’t want these documents to look like generated from a plain text file but you want to have\nlogos, tables, address labels and so on.",[18,48696,48697,48698,48703,48704,48709],{},"As the people that are designing the look of these documents often are not programmers it is a good idea to provide a\nway to use well know tools for creating and editing templates for these documents. What we have been doing for some time\nis to let the customer create template documents using ",[585,48699,48702],{"href":48700,"rel":48701},"http://www.openoffice.org/",[589],"OpenOffice.org",", the open source\nword processor, and transform these documents programmatically. OpenOffice.org uses the\nstandardized ",[585,48705,48708],{"href":48706,"rel":48707},"http://www.oasis-open.org/committees/tc_home.php?wg_abbrev=office",[589],"Open Document"," format to save its\ndocuments. Open Document files are zip archives that contain some XML documents as well as additional content (images,\nmacros, …).",[649,48711,48713],{"id":48712},"the-uno-approach","The UNO approach",[18,48715,48716,48717,48722],{},"One of the older approaches we have been using for document processing is to access an OpenOffice.org instance running\non the server using the ",[585,48718,48721],{"href":48719,"rel":48720},"http://wiki.services.openoffice.org/wiki/Uno",[589],"UNO API",". UNO is a language agnostic API that\nprovides access to a lot of functionality of OpenOffice.org using an IDL. Though really powerful this approach also\nyields some drawbacks:",[577,48724,48725,48728,48731],{},[580,48726,48727],{},"Understanding and learning the UNO API is hard and takes a lot of time",[580,48729,48730],{},"Some features of a document can’t be accessed using the API (e.g. 
the id of form control elements is saved in the\ndocument but is not accessible using UNO)",[580,48732,48733],{},"An instance of OpenOffice.org can only be used by one thread at a time so you need some kind of instance pooling.",[18,48735,48736],{},"These drawbacks make it really hard to design and implement a robust system that can handle the load of a typical web\napplication and can still be maintained by a lot of developers.",[649,48738,48740],{"id":48739},"odfdom","ODFDOM",[18,48742,48743,48744,48748],{},"Some time after the standardization of the Open Document format a new project was\nborn: ",[585,48745,48740],{"href":48746,"rel":48747},"http://odftoolkit.org/projects/odfdom/pages/Home",[589],", a sub project of the odftoolkt project. ODFDOM is a\npure Java API that provides both low level DOM access to the Open Document XML format as well as convenience\nfunctionality to manipulate document data.",[18,48750,48751],{},"As with ODFDOM the application and the document generation all run on the Java Virtual Machine it is easier to maintain\nfrom an adminitrators perspective. Also in contrast to the UNO API ODFDOM is really easy to use.",[18,48753,48754],{},"The following snippet creates a new text document, inserts some text and saves the document to a temp file.",[18,48756,48757],{},"`OdfTextDocument doc = OdfTextDocument.newTextDocument();",[18,48759,48760],{},"doc.addText(\"Hello World!\");",[18,48762,48763],{},"doc.save(File.createTempFile(\"odfdom\", \".odt\"));`",[649,48765,41258],{"id":41257},[18,48767,48768],{},"To use Odfdom for templating you can choose one of the many placeholder approaches in OpenOffice.org. A very simple one\nis the use of user fields. To insert a user field in OpenOffice.org create a new document and go to Insert => Field\ncommand => Others. There you choose the tab variables and user field. You can add a name and a value. The value in our\ncase is only there to have a visual feedback when designing the document. The user field will be replaced automatically.",[18,48770,48771,48772,48775],{},"Let’s see how we can replace our placeholder value. The values for user fields as inserted above are stored in a node\n",[585,48773,48774],{"href":48774},"text:user-field-decl",". This is an excerpt from the Open Document content.xml for a simple example document:",[18,48777,48778],{},[50,48779,48780],{},"\u003Ctext:user-field-decl office:value-type=\"string\" office:string-value=\"hello\" text:name=\"test\"/>",[18,48782,48783],{},"The user field is named test, it’s initial value for visual feedback is set to “hello”.",[18,48785,48786],{},"Imagine that the data that we want to replace with the values in the template is stored in a simple Map of Strings. 
To\nreplace all dummy values with values from you application you can access the nodes using the method\ngetElementsByTagName(“element”):",[18,48788,48789],{},"`Map\u003CString, String> values = new HashMap\u003CString, String>();",[18,48791,48792],{},"values.put(\"test\", \"inserted automatically\");",[18,48794,48795],{},"OdfDocument doc = OdfDocument.loadDocument(\"/path/to/template.odt\");",[18,48797,48798],{},"NodeList nodes = doc.getOfficeBody().getElementsByTagName(OdfTextUserFieldDecl.ELEMENT_NAME.getQName());",[18,48800,48801],{},"for (int i = 0; i \u003C nodes.getLength(); i++) {",[18,48803,48804],{},"OdfTextUserFieldDecl element = (OdfTextUserFieldDecl) nodes.item(i);",[18,48806,48807],{},"if (values.containsKey(element.getTextNameAttribute())) {",[18,48809,48810],{},"element.setOfficeStringValueAttribute(values.get(element.getTextNameAttribute()));",[18,48812,8659],{},[18,48814,8659],{},[18,48816,48817],{},"doc.save(\"/path/to/result.odt\");`",[18,48819,48820],{},"When running the code above, the value in the document is replaced with the value set programmatically.",[649,48822,12384],{"id":12383},[18,48824,48825],{},"So far we are running code using ODFDOM for document generation successfully in two larger projects that have been\ndeveloped recently. We believe that ODFDOM will help us delivering additional value for our customers with less\ndevelopment effort.",{"title":48,"searchDepth":86,"depth":86,"links":48827},[48828,48829,48830,48831],{"id":48712,"depth":126,"text":48713},{"id":48739,"depth":126,"text":48740},{"id":41257,"depth":126,"text":41258},{"id":12383,"depth":126,"text":12384},[613,996],"2010-06-13T17:57:40","Generating documents from data that is managed by a web application is a quite common need. Think about letters that are\\ngenerated for a customer relationship management system or bills that are to be send for membership fees. For corporate\\nidentity reasons you don’t want these documents to look like generated from a plain text file but you want to have\\nlogos, tables, address labels and so on.","https://synyx.de/blog/template-based-document-generation-using-odfdom/",{},"/blog/template-based-document-generation-using-odfdom",{"title":48685,"description":48694},"blog/template-based-document-generation-using-odfdom",[48841,290,48739,48842],"document-management","openoffice-org","Generating documents from data that is managed by a web application is a quite common need. Think about letters that are generated for a customer relationship management system or bills…","s3AA3WrDYmRY0S6MePU2SvOKGaEOPjNDiiAX_h4Pn5U",{"id":48846,"title":48847,"author":48848,"body":48849,"category":48879,"date":48880,"description":48881,"extension":617,"link":48882,"meta":48883,"navigation":499,"path":48884,"seo":48885,"slug":48887,"stem":48888,"tags":48889,"teaser":48891,"__hash__":48892},"blog/blog/uberladen-vs-trivialisieren-java-magazin-artikel.md","Überladen vs. Trivialisieren – Zwischen Platin und Blech",[43256],{"type":11,"value":48850,"toc":48877},[48851,48854,48874],[14,48852,48847],{"id":48853},"überladen-vs-trivialisieren-zwischen-platin-und-blech",[18,48855,48856,48857,48861,48862,48867,48868,48873],{},"Das Interview mit Joachim Arrasz, Softwarearchitekt bei ",[585,48858,48860],{"href":19643,"rel":48859},[589],"Synyx GmbH & Co. 
KG",",\nund ",[585,48863,48866],{"href":48864,"rel":48865},"http://www.pbit.org/index.html",[589],"Pavlo Baron",", Enterprise Architekt in München, ist nun\nim ",[585,48869,48872],{"href":48870,"rel":48871},"http://it-republik.de/jaxenter/java-magazin-ausgaben/Lucene-000399.html",[589],"Java Magazin"," erschienen. In der Rubrik\n“Architektur” erschien der Artikel mit dem Titel “Überladen vs. Trivialisieren – Zwischen Platin und Blech”.",[18,48875,48876],{},"In der Diskussion geht es um die Balance zwischen minimalistischen und total überzogenen Lösungswegen in der\nSoftwarearchitektur. Anhand eines konkreten Beispiels aus der Praxis diskutieren Joachim Arrasz und Pavlo Baron, wie man\neine Lösung überladen oder allzu sehr vereinfachen kann. Und vor allem: Wie findet man die goldene Mitte?",{"title":48,"searchDepth":86,"depth":86,"links":48878},[],[613],"2010-06-08T13:28:00","Das Interview mit Joachim Arrasz, Softwarearchitekt bei Synyx GmbH & Co. KG,\\nund Pavlo Baron, Enterprise Architekt in München, ist nun\\nim Java Magazin erschienen. In der Rubrik\\n“Architektur” erschien der Artikel mit dem Titel “Überladen vs. Trivialisieren – Zwischen Platin und Blech”.","https://synyx.de/blog/uberladen-vs-trivialisieren-java-magazin-artikel/",{},"/blog/uberladen-vs-trivialisieren-java-magazin-artikel",{"title":48847,"description":48886},"Das Interview mit Joachim Arrasz, Softwarearchitekt bei Synyx GmbH & Co. KG,\nund Pavlo Baron, Enterprise Architekt in München, ist nun\nim Java Magazin erschienen. In der Rubrik\n“Architektur” erschien der Artikel mit dem Titel “Überladen vs. Trivialisieren – Zwischen Platin und Blech”.","uberladen-vs-trivialisieren-java-magazin-artikel","blog/uberladen-vs-trivialisieren-java-magazin-artikel",[48890,13173],"losungsweg","Das Interview mit Joachim Arrasz, Softwarearchitekt bei Synyx GmbH & Co. KG, und Pavlo Baron, Enterprise Architekt in München, ist nun im Java Magazin erschienen. In der Rubrik “Architektur” erschien…","nhsOQi94sEgrfo5KsCn9ZYMiAsYPQWlLxJhTZ_KBJOA",{"id":48894,"title":48895,"author":48896,"body":48897,"category":48992,"date":48993,"description":48904,"extension":617,"link":48994,"meta":48995,"navigation":499,"path":48996,"seo":48997,"slug":48998,"stem":48999,"tags":49000,"teaser":49001,"__hash__":49002},"blog/blog/5-reasons-for-teams.md","Five reasons why you should not work alone on IT-Projects",[12981],{"type":11,"value":48898,"toc":48985},[48899,48902,48905,48908,48911,48915,48924,48927,48931,48940,48944,48947,48950,48954,48957,48960,48964,48967,48976],[14,48900,48895],{"id":48901},"five-reasons-why-you-should-not-work-alone-on-it-projects",[18,48903,48904],{},"In my opinion its much better to have a team working on a project than a single person.",[18,48906,48907],{},"Even if this means that your customer might have to wait a bit longer for his project to start (because other projects\nalso occupy more people) everybody benefits because of increased productivity, better code and happy team members.",[18,48909,48910],{},"Here are my top five reasons why you should not leave one guy alone with a IT project…",[649,48912,48914],{"id":48913},"avoid-single-points-of-failure","Avoid Single Points of Failure",[18,48916,48917,48918,48923],{},"People get sick, are on vacation or might even resign from their job. 
You have to be able to compensate this by having\nother members that don’t need weeks or months to understand the projects requirements or codebase.\nAvoiding ",[585,48919,48922],{"href":48920,"rel":48921},"http://en.wikipedia.org/wiki/Single_Point_of_Failure",[589],"single points of failures"," saves you from having to get\nnew (other) people up-to-date which will cost you time, money and probably even upsets your customer.",[18,48925,48926],{},"Additionally your customer might ask for enhancements, bugfixes or even new (related) applications any time after the\noriginal project is finished. People that were involved on that project might be working on all kind of other projects\nthen. If you have more than one guy that knows the domain and the code then you gain alot of flexibility in resource\nmanagement.",[649,48928,48930],{"id":48929},"think-twice-triply","Think Twice / Triply / …",[18,48932,48933,48934,48939],{},"Another big benefit is, that team members have somebody to discuss any tasks with. These discussions might be about how\nto design a special feature or how the customers domain is modeled best. The members can save each other from producing\nbugs by reviewing each others code. They can also\nuse ",[585,48935,48938],{"href":48936,"rel":48937},"http://www.extremeprogramming.org/rules/pair.html",[589],"Pairprogramming"," for tricky parts of the application.",[649,48941,48943],{"id":48942},"individual-skills","Individual Skills",[18,48945,48946],{},"Each member of your team also brings his special and individual skills and expirience. One guy might be better when it\ncomes down to software architecture, another one might be the best choice to communicate with the customer and a third\nmight be an expert at designing user interfaces.",[18,48948,48949],{},"Since IT-Projects require alot of different skills each of them will benefit from an increased bandwidth of skills.",[649,48951,48953],{"id":48952},"motivation","Motivation",[18,48955,48956],{},"IT-Projects can be frustrating sometimes. One person that works alone gets demotivated easily because he feels left\nalone with whatever is frustrating him.",[18,48958,48959],{},"Being able to talk about problems and motivating each other helps to stay in a good temper and thus be more productive.\nHaving a good team and fun at work helps to endure frustrating parts of a project.",[649,48961,48963],{"id":48962},"distraction","Distraction",[18,48965,48966],{},"If someone has to accomplish everything by himself he might also get easy distracted. He might start browsing the web or\nhe pays more attention his colleagues projects than to his own. But he will probably stay focused if you have a team\nthat works with him, because he has someone to justify himself to.",[18,48968,48969,48970,48975],{},"A small daily standup-meeting (e.g. 
a ",[585,48971,48974],{"href":48972,"rel":48973},"http://www.scrumbasics.com/conducting-daily-scrum-meeting/",[589],"daily SCRUM",") where\neveryone explains what he has done the last day can help the team to stay focused.",[18,48977,48978,48979,48984],{},"Imagine how guilty you’d feel if everyone worked hard and you only watched videos on ",[585,48980,48983],{"href":48981,"rel":48982},"http://www.youtube.com/",[589],"youtube","\ninstead of writing a unit test for the feature you implemented the day before.",{"title":48,"searchDepth":86,"depth":86,"links":48986},[48987,48988,48989,48990,48991],{"id":48913,"depth":126,"text":48914},{"id":48929,"depth":126,"text":48930},{"id":48942,"depth":126,"text":48943},{"id":48952,"depth":126,"text":48953},{"id":48962,"depth":126,"text":48963},[613],"2010-05-25T11:03:19","https://synyx.de/blog/5-reasons-for-teams/",{},"/blog/5-reasons-for-teams",{"title":48895,"description":48904},"5-reasons-for-teams","blog/5-reasons-for-teams",[45796,47921,24907],"In my opinion its much better to have a team working on a project than a single person. Even if this means that your customer might have to wait a…","IVwgKz9GQYkBCYmZch_5lMbuzLkIv6gZqFpTi0fFih8",{"id":49004,"title":48168,"author":49005,"body":49006,"category":49105,"date":49106,"description":49107,"extension":617,"link":49108,"meta":49109,"navigation":499,"path":49110,"seo":49111,"slug":49113,"stem":49114,"tags":49115,"teaser":49116,"__hash__":49117},"blog/blog/mobile-solutions-summary-2.md",[48170],{"type":11,"value":49007,"toc":49103},[49008,49010,49024,49043,49053,49059,49068,49092],[14,49009,48168],{"id":48175},[18,49011,49012,49013,49018,49019,49023],{},"It has been quite a while since ",[585,49014,49017],{"href":49015,"rel":49016},"http://blog.synyx.de/2010/04/30/mobile-solutions-summary/",[589],"my last update"," on what’s\nhappening over at our ",[585,49020,49022],{"href":48267,"rel":49021},[589],"Mobile Solutions Blog",", so let’s get right into it.",[18,49025,49026,49027,43055,49032,49036,49037,49042],{},"One of the most interesting posts\nwas ",[585,49028,49031],{"href":49029,"rel":49030},"http://mobile.synyx.de/2010/05/10/user-statistics-from-synyxsudoku/",[589],"“User statistics from SynyxSudoku”",[585,49033,49035],{"href":48279,"rel":49034},[589],"Tobias",". It shows the distribution of Android devices, which are running our\nvery own Sudoku App, which by the\nway ",[585,49038,49041],{"href":49039,"rel":49040},"http://mobile.synyx.de/2010/05/10/synyxsudoku-update-to-version-1-02/",[589],"got updated"," a couple of weeks ago:",[11259,49044,49045,49048,49050],{},[18,49046,49047],{},"*First of all, I was quite surprised as i saw that 70% of the SynyxSudoku users that uploaded their highscores have\nalso agreed to send us their device specific data, because I really didn’t expect more than 10-20%.",[18,49049,24035],{},[18,49051,49052],{},"The devices came mostly with the latest Android versions available. 
… The smaller devices (240×320) aren’t that\npopular as it seems (maybe because there’s only the HTC Tattoo that uses this resolution), but the others are quite\nevenly matched.*",[18,49054,49055,49056,986],{},"If you are into Android and need some hard facts, go and checkout the numbers on\nour ",[585,49057,49022],{"href":49029,"rel":49058},[589],[18,49060,49061,49062,49067],{},"We were looking for a consistent example to show how to solve a problem over the 3 major platforms iPhone, Android and\nMaemo and ",[585,49063,49066],{"href":49064,"rel":49065},"http://maps.google.com",[589],"Google Maps"," seemed to be a nice show case.",[18,49069,49070,49071,49076,49077,49082,49083,49086,49087,986],{},"We decided to start a little ",[585,49072,49075],{"href":49073,"rel":49074},"http://mobile.synyx.de/tag/google-maps/",[589],"tutorial series",", which results in three\nhands-on examples of how to integrate Google Maps in your application. There\nare ",[585,49078,49081],{"href":49079,"rel":49080},"http://mobile.synyx.de/2010/05/07/google-maps-on-android-part-2-overlays/",[589],"code samples for Android"," and you can\nalready download a ",[573,49084,49085],{},"UIViewController",", which\nshows ",[585,49088,49091],{"href":49089,"rel":49090},"http://mobile.synyx.de/2010/05/19/how-to-add-a-%E2%80%9Cfind-your-company%E2%80%9D-feature-to-your-iphone-app-%E2%80%93-part-ii/",[589],"“How to add a ‘Find Your Company’ feature to your iPhone App”",[18,49093,49094,49095,49098,49099,986],{},"There’s a lot happening in the mobile world and on our ",[585,49096,49022],{"href":48267,"rel":49097},[589],", so you better\nhead over and subscribe to ",[585,49100,49102],{"href":48272,"rel":49101},[589],"our feed",{"title":48,"searchDepth":86,"depth":86,"links":49104},[],[613],"2010-05-21T13:52:57","It has been quite a while since my last update on what’s\\nhappening over at our Mobile Solutions Blog, so let’s get right into it.","https://synyx.de/blog/mobile-solutions-summary-2/",{},"/blog/mobile-solutions-summary-2",{"title":48168,"description":49112},"It has been quite a while since my last update on what’s\nhappening over at our Mobile Solutions Blog, so let’s get right into it.","mobile-solutions-summary-2","blog/mobile-solutions-summary-2",[20508],"It has been quite a while since my last update on what’s happening over at our Mobile Solutions Blog, so let’s get right into it. One of the most interesting…","1p6YXeszwV5TrMg-KutpsDhtSaWLSU9xOSsSsDGZ9Qo",{"id":49119,"title":49120,"author":49121,"body":49122,"category":49231,"date":49232,"description":49233,"extension":617,"link":49234,"meta":49235,"navigation":499,"path":49236,"seo":49237,"slug":49238,"stem":49239,"tags":49240,"teaser":49242,"__hash__":49243},"blog/blog/scrum-an-anti-word.md","Why is Scrum getting an anti-word?",[41052],{"type":11,"value":49123,"toc":49226},[49124,49127,49130,49133,49140,49145,49151,49154,49180,49183,49187,49190,49193,49196,49199,49202,49205,49208,49211,49214,49217,49220,49223],[14,49125,49120],{"id":49126},"why-is-scrum-getting-an-anti-word",[18,49128,49129],{},"For quite some years Scrum has been THE agile development process. Scrum got mainstream. But let’s have a look what got\nmainstream here. Scrum, Agility, Buzzwords, Scrum Master got mainstream as words, in business talk, in dev talk, in\ntrainings.",[18,49131,49132],{},"But what did it really achive for better communication, better relations and collaboration between developers, managers,\ncustomers etc. 
Has Scrum fundamentally improved the way software is delivered in our industry?",[18,49134,49135,49136,49139],{},"I probably couldn’t find many people who’d respond with an unconditional “",[27,49137,49138],{},"YES!","” to this question.",[18,49141,49142],{},[27,49143,49144],{},"But why? Why is Scrum getting an anti-word for many?",[18,49146,49147],{},[2223,49148],{"alt":49149,"src":49150},"\"Security!\"","http://defunctscrum.blogspot.com/2007/07/should-you-leave-scrum-off-resume.html",[18,49152,49153],{},"There are various different reasons:",[577,49155,49156,49159,49162,49165,49168,49171,49174,49177],{},[580,49157,49158],{},"Scrum’s transparency creats angst for people living in and from intransparency",[580,49160,49161],{},"Scrum’s need for change is uncomfortable for people’s need for stability",[580,49163,49164],{},"Wrong implementation of Scrum",[580,49166,49167],{},"Scrum Master who don’t take people with them and run ahead in their own speed",[580,49169,49170],{},"Unbalanced power division between roles",[580,49172,49173],{},"Scrum Master who have THE solution instead of enabling teams to find differing ways for different problems",[580,49175,49176],{},"The Scrum hype and overwhelming/missleading marketing",[580,49178,49179],{},"Could go on with lots more",[18,49181,49182],{},"Let’s focus on a few and look at them a little bit more in-depth.",[649,49184,49186],{"id":49185},"speedy-scrum-master","Speedy Scrum Master",[18,49188,49189],{},"A person who just got his Scrum Master Certificate and gets all enthusiastic about it returns to his company from\ntraining and wants to start. His textbook knowledge tells him how to technically implement Scrum, but without years of\nexperience it’s applied in a step-by-step way. Without soft-skills and knowing what’s appropriate when it’s allmost\nimpossible to be successful right away without watering down Scrum to non-Scrum. Picking people up where they are is\none of the key things. You can’t just tell them where they ought to be without telling why and what for.",[18,49191,49192],{},"Once this poor Scrum Master introduced Scrum to team for a few sprints he and his team will hit walls without knowing\nhow to climb over them. At this point the lack of experience of the Scrum Master leads to the first internal critics to\nsurface. The longer a Scrum Master only has his Scrum process goal in mind without providing real solutions the more\nScrum will be the scapegoat.",[18,49194,49195],{},"The more this happens (and I guess it does a lot), the more people will say “Oh no, not another of these Scrum guys”.",[649,49197,49170],{"id":49198},"unbalanced-power-division-between-roles",[18,49200,49201],{},"Unbalanced power division between roles causes wrong implementation and frustration.",[18,49203,49204],{},"For example Scrum Masters who don’t have the support of upper management to make things happen.",[18,49206,49207],{},"Teams without the power to stop a sprint, without the needed skills and not cross-functional are handicapped teams.",[18,49209,49210],{},"Product Owner who are not directly responsible for the profit and loss or ROI of the product. I often wonder how a\nProduct Owner is supposed to prioritize without? I still often hear something along the lines “we need everything!”.",[18,49212,49213],{},"One of the things that’s done wrong in projects that use Scrum is that Scrum should help a project to fail early instead\nof staying for a long time and dying a long slow death over years. Why’s that? 
Because often all people involved have\nconflicting interests regarding their job safety and early project death. The only one that could have an interest in\nthat is a Product Owner with budget responsibility who knows that a project shouldn’t be continued if the value of to be\nimplemented stories is lower than its costs.",[649,49215,49176],{"id":49216},"the-scrum-hype-and-overwhelmingmissleading-marketing",[18,49218,49219],{},"Scrum is often advertised as solution for everything. Scrum has been hyped for years. The result is lots of so called\n‘experts’ promote and implement Scrum without ensuring it’s done right or often even without the experience on how to do\nit right. Again Scrum gets the blame for failing projects that might or might not have failed anyway.",[18,49221,49222],{},"So is Scrum at fault? Or is it the way Scrum is used today? Can Scrum be rescued or do we need something new just\nbecause Scrum is done wrong instead of because it is wrong?",[18,49224,49225],{},"As with so many things I think we should focus more on quality instead of quantity. Don’t look for cookbook receipts,\nlet your team tailor their process inside the Scrum skeleton and share (don’t force it) your experience with them. I\nmight follow up on this in a future article.",{"title":48,"searchDepth":86,"depth":86,"links":49227},[49228,49229,49230],{"id":49185,"depth":126,"text":49186},{"id":49198,"depth":126,"text":49170},{"id":49216,"depth":126,"text":49176},[613],"2010-05-20T12:57:24","For quite some years Scrum has been THE agile development process. Scrum got mainstream. But let’s have a look what got\\nmainstream here. Scrum, Agility, Buzzwords, Scrum Master got mainstream as words, in business talk, in dev talk, in\\ntrainings.","https://synyx.de/blog/scrum-an-anti-word/",{},"/blog/scrum-an-anti-word",{"title":49120,"description":49129},"scrum-an-anti-word","blog/scrum-an-anti-word",[7906,49241,18709,14039,6884],"anti-scrum","For quite some years Scrum has been THE agile development process. Scrum got mainstream. But let’s have a look what got mainstream here. Scrum, Agility, Buzzwords, Scrum Master got mainstream…","61KWdx6d7036gQMdieWCu_XLPod6Mrrx0FD70e-o9Jk",{"id":49245,"title":49246,"author":49247,"body":49249,"category":49339,"date":49340,"description":49341,"extension":617,"link":49342,"meta":49343,"navigation":499,"path":49344,"seo":49345,"slug":49253,"stem":49346,"tags":49347,"teaser":49348,"__hash__":49349},"blog/blog/java-mail-properties-esmtp-authentication-error.md","Java Mail Properties – esmtp authentication error",[49248],"herbold",{"type":11,"value":49250,"toc":49337},[49251,49254,49257,49260,49294,49297,49332,49335],[14,49252,49246],{"id":49253},"java-mail-properties-esmtp-authentication-error",[18,49255,49256],{},"Last week, we had a problem with sending emails from our application. 
Currently we use the spring email integration to\ndo this.",[18,49258,49259],{},"An esmtp server runs at customers side, which does not accept the default mail settings we made for our local test\nenvironment:",[43,49261,49263],{"className":3792,"code":49262,"language":3794,"meta":48,"style":48},"\u003Cbean>\n \u003Cproperty name=\"host\" value=\"${mail.host}\"/>\n \u003Cproperty name=\"port\" value=\"${mail.port}\" />\n \u003Cproperty name=\"username\" value=\"${mail.user}\" />\n \u003Cproperty name=\"password\" value=\"${mail.password}\" />\n\u003C/bean>\n",[50,49264,49265,49270,49275,49280,49285,49290],{"__ignoreMap":48},[53,49266,49267],{"class":55,"line":56},[53,49268,49269],{},"\u003Cbean>\n",[53,49271,49272],{"class":55,"line":86},[53,49273,49274],{}," \u003Cproperty name=\"host\" value=\"${mail.host}\"/>\n",[53,49276,49277],{"class":55,"line":126},[53,49278,49279],{}," \u003Cproperty name=\"port\" value=\"${mail.port}\" />\n",[53,49281,49282],{"class":55,"line":163},[53,49283,49284],{}," \u003Cproperty name=\"username\" value=\"${mail.user}\" />\n",[53,49286,49287],{"class":55,"line":186},[53,49288,49289],{}," \u003Cproperty name=\"password\" value=\"${mail.password}\" />\n",[53,49291,49292],{"class":55,"line":221},[53,49293,40519],{},[18,49295,49296],{},"Following property fixed the problem (after a long time of experimenting with different mail.smtp.* properties!):",[43,49298,49300],{"className":3792,"code":49299,"language":3794,"meta":48,"style":48},"\u003Cproperty name=\"javaMailProperties\">\n \u003Cprops>\n \u003Cprop key=\"mail.smtp.localhost\">mylocalhost\u003C/prop>\n \u003Cprop key=\"mail.smtp.ehlo\">false\u003C/prop>\n \u003C/props>\n\u003C/property>\n\n",[50,49301,49302,49307,49312,49317,49322,49327],{"__ignoreMap":48},[53,49303,49304],{"class":55,"line":56},[53,49305,49306],{},"\u003Cproperty name=\"javaMailProperties\">\n",[53,49308,49309],{"class":55,"line":86},[53,49310,49311],{}," \u003Cprops>\n",[53,49313,49314],{"class":55,"line":126},[53,49315,49316],{}," \u003Cprop key=\"mail.smtp.localhost\">mylocalhost\u003C/prop>\n",[53,49318,49319],{"class":55,"line":163},[53,49320,49321],{}," \u003Cprop key=\"mail.smtp.ehlo\">false\u003C/prop>\n",[53,49323,49324],{"class":55,"line":186},[53,49325,49326],{}," \u003C/props>\n",[53,49328,49329],{"class":55,"line":221},[53,49330,49331],{},"\u003C/property>\n",[18,49333,49334],{},"With these two settings you create at least the following command that is required to send the mails : helo mylocalhost",[607,49336,989],{},{"title":48,"searchDepth":86,"depth":86,"links":49338},[],[613],"2010-05-06T17:22:50","Last week, we had a problem with sending emails from our application. Currently we use the spring email integration to\\ndo this.","https://synyx.de/blog/java-mail-properties-esmtp-authentication-error/",{},"/blog/java-mail-properties-esmtp-authentication-error",{"title":49246,"description":49256},"blog/java-mail-properties-esmtp-authentication-error",[],"Last week, we had a problem with sending emails from our application. Currently we use the spring email integration to do this. 
An esmtp server runs at customers side, which…","JuUb87jTtYgzUj6ke8EUX_BsQ7XcnNlJOrPQYaKisqg",{"id":49351,"title":48168,"author":49352,"body":49353,"category":49417,"date":49418,"description":49419,"extension":617,"link":49420,"meta":49421,"navigation":499,"path":49422,"seo":49423,"slug":48175,"stem":49425,"tags":49426,"teaser":49427,"__hash__":49428},"blog/blog/mobile-solutions-summary.md",[48170],{"type":11,"value":49354,"toc":49415},[49355,49357,49364,49376,49383,49389,49401,49408],[14,49356,48168],{"id":48175},[18,49358,49359,49360,49363],{},"Since the inception of our mobile blog last month a lot has happened over at ",[585,49361,48267],{"href":48267,"rel":49362},[589],". I’m going to\nhighlight some of the stuff for you here.",[18,49365,10847,49366,49371,49372,49375],{},[585,49367,49370],{"href":49368,"rel":49369},"http://mobile.synyx.de/2010/04/22/release-of-synyxsudoku/",[589],"biggest news"," is that our\ntrainee ",[585,49373,49035],{"href":48279,"rel":49374},[589]," released our very own Sudoku game for Android handsets:",[11259,49377,49378],{},[18,49379,49380],{},[573,49381,49382],{},"It’s completely free of charge! …. SynyxSudoku offers 3 difficulty levels, containing nearly unlimited puzzling fun\ndue to new created sudokus each time you launch! And even if none of these difficulty levels fits your needs –\nSynyxSudoku has the option to let you create a difficulty level of your own.",[18,49384,49385,49386,986],{},"Our Synyx Sudoku was very well received and downloaded more than 300 times in less than a week. Congratulations to\nTobias and if you haven’t checked it out, head over to our ",[585,49387,48329],{"href":48208,"rel":49388},[589],[18,49390,49391,49392,49395,49396,4101],{},"If you are into the hard stuff, be sure to check out ",[585,49393,47589],{"href":48639,"rel":49394},[589],"\n‘s ",[585,49397,49400],{"href":49398,"rel":49399},"http://mobile.synyx.de/2010/04/22/howto-startup-with-maemo-and-qt-4-6/",[589],"tutorial on how to get started programing for the N900",[11259,49402,49403],{},[18,49404,49405],{},[573,49406,49407],{},"As a Java developer it was not easy for me to find the right entry point for developing c++, using the\ntrend-setting Qt 4.6 environment and having a cute ide with rapid prototyping capabilities. After a little bit of\nreading and lots of trials and errors i found a way for me that worked.",[18,49409,49410,49411,986],{},"It’ll make things less tough and get you started with your development environment. Don’t miss Florian’s next\ninstallment and subscribe to the ",[585,49412,49414],{"href":48272,"rel":49413},[589],"mobile solutions blog’s feed",{"title":48,"searchDepth":86,"depth":86,"links":49416},[],[613],"2010-04-30T06:46:25","Since the inception of our mobile blog last month a lot has happened over at http://mobile.synyx.de. I’m going to\\nhighlight some of the stuff for you here.","https://synyx.de/blog/mobile-solutions-summary/",{},"/blog/mobile-solutions-summary",{"title":48168,"description":49424},"Since the inception of our mobile blog last month a lot has happened over at http://mobile.synyx.de. I’m going to\nhighlight some of the stuff for you here.","blog/mobile-solutions-summary",[20508],"Since the inception of our mobile blog last month a lot has happened over at http://mobile.synyx.de. I’m going to highlight some of the stuff for you here. 
The biggest news…","5Tr_PJe_pLL4p-X-PYcVotPpC2D6CuNgaiXYPG0-eKc",{"id":49430,"title":49431,"author":49432,"body":49433,"category":49629,"date":49630,"description":49631,"extension":617,"link":49443,"meta":49632,"navigation":499,"path":49633,"seo":49634,"slug":49437,"stem":49636,"tags":49637,"teaser":49640,"__hash__":49641},"blog/blog/modular-web-applications-based-on-spring.md","Modular Web-Applications based on Spring",[12981],{"type":11,"value":49434,"toc":49627},[49435,49438,49447,49450,49459,49472,49475,49497,49503,49511,49521,49554,49571,49577,49615,49625],[14,49436,49431],{"id":49437},"modular-web-applications-based-on-spring",[18,49439,49440,49441,49446],{},"Many of the Web-Applications we develop for our customers are based upon our small Framework on top\nof ",[585,49442,49445],{"href":49443,"rel":49444},"https://synyx.de/blog/modular-web-applications-based-on-spring/",[589],"Spring / Spring MVC",". This framework basically\nbrings often used components ready-to-use (or ready to customize) and – of course – makes things even simpler than\nSpring already does.",[18,49448,49449],{},"Modular design of applications brings a lot of advantages but – as always – also some disadvantages. A modular structure\ncan help to increase cohesion and let developers focus on the function their concrete module has. Another big thing is\nreusability. So the core framework already brings functionality that is used in all projects that depend on the\nframework so far: user management for example. On the other hand modular design also brings complexity. I think its\nbusiness of a framework to hide this complexity from the user (developer in this case). Nevertheless its good when the\ndeveloper knows (and understands) what goes on under the hood and (even more important) can easily extend the framework\nwhere he needs to.",[18,49451,49452,49453,49458],{},"As I mentioned we use Spring / Spring MVC as a base for many projects. Spring provides a lot of points where you can\nextend the framework by implementing interfaces and defining or injecting these implementations to the classes that do\nthe real work (like interceptors\nin ",[585,49454,49457],{"href":49455,"rel":49456},"http://static.springsource.org/spring/docs/3.0.x/javadoc-api/org/springframework/web/servlet/mvc/annotation/DefaultAnnotationHandlerMapping.html",[589],"AnnotationHandlerMapping",").\nIn most cases this is enough. If you start to develop a modular application it is not. At least it was not for our case.",[18,49460,49461,49462,49464,49465,49468,49469],{},"Our Web-Applications based on the mentioned Framework always use at least two “modules”: The core-module (that brings\nuser management and some nice features to make coders happy) and the application itself. Each of these modules brings\nits own bean-configurations which are loaded all together using a wildcard resource like the following that reads all\n",[50,49463,37616],{}," files within any subfolder of",[50,49466,49467],{},"META-INF"," in the applications classpath: ",[50,49470,49471],{},"classpath*:META-INF/**/beans.xml",[18,49473,49474],{},"This is a really simple way how the modules can interact since one module can provide a Service another depends on.",[18,49476,49477,49478,49483,49484,49486,49487,49490,49491,49493,49494,49496],{},"Now, the big problem is, that each module often brings its own components that have to be registered to Springs\n“services”. 
This is a really simple way for the modules to interact, since one module can provide a service that another one depends on.

Now the big problem is that each module often brings its own components that have to be registered with Spring's "services". Let me explain the problem by talking about [internationalization again](http://blog.synyx.de/2010/04/21/know-your-apis-lessons-learned-from-resourcebundle/). Each module brings its own resource bundle containing the internationalization for its web-interface components. Spring provides a simple way to register a `MessageSource` by defining one bean with the id `messageSource` in your context. And that is exactly the problem: it is **one** bean. So you need a way for each module to register its own `MessageSource`, even though Spring only supports one. Our framework has to handle this, because it also introduces the modular structure.

The "out of the box" way that would work is that the application that assembles all these modules defines the `MessageSource` with all basenames (of the properties files) the application uses. But this would be part of the mentioned complexity that should be kept away from the daily business, and it brings other problems (what if one module wants to store its internationalization in the database?).

So what did we do? We use a simple plugin mechanism a colleague developed and that synyx publishes as open source: [Hera](http://hera.synyx.org).

We use a bean that gets registered as the `messageSource` with Spring and takes care of dispatching message-resolving requests to the real `MessageSource` implementations spread all over the modules:

```xml
<bean id="messageSource">
    <property name="sources">
        <plugin:list class="org.synyx.minos.message.ModuleMessageSource"/>
    </property>
    <property name="useCodeAsDefaultMessage" value="true" />
</bean>
```

So this registers our `DispatchingMessageSource`, into which Hera injects all beans within the context that implement `ModuleMessageSource`. This pretty much does the trick.
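The `ModuleMessageSource` interface itself is not shown in the post, so the following is only an illustrative sketch of what a module-side contribution might look like; the `getPrefix` method, the basename and the use of Spring's `ResourceBundleMessageSource` are assumptions made for illustration, not the actual Minos/Hera API:

```java
import org.springframework.context.support.ResourceBundleMessageSource;

// Hypothetical module contribution: a regular Spring MessageSource that also
// announces which message-code prefix (e.g. "core.") it is responsible for,
// so the dispatcher can pre-select it as a candidate.
public class CoreModuleMessageSource extends ResourceBundleMessageSource {

    public CoreModuleMessageSource() {
        // resource bundle shipped inside the core module
        setBasename("META-INF/core/messages");
    }

    public String getPrefix() {
        return "core.";
    }
}
```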
The reason we use `ModuleMessageSource` instead of Spring's built-in `MessageSource` interface is, on the one hand, that we can do some performance tweaks and, on the other hand, that we do not pick up any "unwanted" implementations that end up in the context somehow.

With some simple dispatching logic within `DispatchingMessageSource` we found a powerful way to work around this limitation of Spring in conjunction with our modular system:

```java
// Only ask the modules whose prefix matches the requested message code, in order.
List<MessageSourcePlugin> candidates = sources.getPluginsFor(getPrefixFromCode(code));
for (MessageSourcePlugin source : candidates) {
    MessageFormat format = resolveMessageWithSource(source, code, locale);
    if (null != format) {
        return format;
    }
}
```

By the way, we use this mechanism a lot when it comes to easily extending functionality of the framework core, including `HandlerInterceptor`, `PropertyEditorRegistrar` and our modules themselves.
## Know your APIs – Lessons learned from ResourceBundle

Last week I spent some time hunting down an internationalization issue that came up while developing for a recent project. Let me explain what happened.

Message lookup, of course, always goes together with the locale (`java.util.Locale`) of the client the message is resolved for. The problem was that messages for English users were not resolved to the English translation, but to the German one.

Within the project I am working on, there were the following message files at that time:

- `messages.properties` (containing the English translation)
- `messages_de.properties` (containing the German translation)
- `messages_it.properties` (containing the Italian translation)

Usually you provide a property file per language containing all the translations. `ResourceBundle` uses a fallback mechanism from the full locale down to more general ones (e.g. it first checks `messages_de_DE.properties`, then `messages_de.properties`, down to `messages.properties` in the end, which is the overall default).

This actually makes a lot of sense, because this way you can provide values relevant for all languages in `messages.properties`, English-specific values in `messages_en.properties`, and things that differ between English-speaking countries in files like `messages_en_US.properties`, `messages_en_UK.properties` and so on. The fallback works perfectly: if you do not provide US/UK-specific files, message lookup for both en_US and en_UK resolves keys from `messages_en.properties`. Additionally, if `messages_en_US.properties` exists but the key lookup fails (the file does not contain a translation for the key), the key is also looked up in `messages_en.properties` (which may provide it).
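A minimal sketch of such a lookup (the key is made up; the bundles are the property files listed above): for the requested locale en_US, `getBundle` walks the candidates `messages_en_US`, `messages_en`, then the candidates derived from the JVM's default locale, and finally `messages`.

```java
import java.util.Locale;
import java.util.ResourceBundle;

public class LookupDemo {

    public static void main(String[] args) {
        // With messages.properties, messages_de.properties and messages_it.properties
        // on the classpath, this walks messages_en_US -> messages_en ->
        // (default-locale candidates) -> messages.
        ResourceBundle bundle = ResourceBundle.getBundle("messages", new Locale("en", "US"));
        System.out.println(bundle.getString("welcome.title"));
    }
}
```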
In my concrete case, resolving German values (coming from `messages_de.properties`) worked fine, but when the user's locale was switched to English, the keys also resolved to German values.

I had not created a `messages_en.properties`, because English was supposed to be the overall fallback whenever no i18n for the requested language was available. I thought that if the user had the locale fr, the system would check `messages_fr.properties` and then `messages.properties`, which would display English messages because no French translation is available. I debugged for a while, first within our own framework and later down to Spring's i18n-related classes, and could not find the mistake.

Then, once I had excluded all possible mistakes on our side and inside the Spring Framework our application is based on, my way led me down to `java.util.ResourceBundle`. Since this class makes extensive use of caching (for good reasons, of course) and of static factory methods (which are almost impossible to debug), I ended up at the [API doc of `java.util.ResourceBundle`](http://api.synyx.de/j2sdk6/api/java/util/ResourceBundle.html). There I found the mistake I had made:

> `getBundle` uses the base name, the specified locale, and the default locale (obtained from `Locale.getDefault`) to generate a sequence of *candidate bundle names*.

I had forgotten that `ResourceBundle` also looks up files for the JVM's default locale before falling back to the base file (`messages.properties`). The default locale of my JVM is de_DE, which led to the following lookup path:

- `messages_en_US.properties` (not found)
- `messages_en.properties` (not found)
- `messages_de_DE.properties` (from the default locale, not found)
- `messages_de.properties` (from the default locale, FOUND)
- (`messages.properties` was not checked because the key had already been found)

So the fix was easy. There are even several ways to fix it:

- rename `messages.properties` to `messages_en.properties` (which leads to the French dude having to learn German)
- copy `messages.properties` to `messages_en.properties` (this is copy and paste, but it could be solved within the build process using mvn)
- set the default locale of the JVM to an English one by calling `Locale.setDefault(englishLocale)` early during application boot, as sketched below
- set the default locale as a command-line argument of your JVM (e.g. `-Duser.language=en -Duser.country=US`)
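A minimal sketch of the third option, assuming you control the application's entry point (the class name is made up):

```java
import java.util.Locale;

public class ApplicationBootstrap {

    public static void main(String[] args) {
        // Make English the JVM-wide fallback before the first ResourceBundle is
        // loaded (and cached), so the machine's de_DE locale no longer sneaks
        // into the lookup path.
        Locale.setDefault(Locale.ENGLISH);

        // ... start the web application / Spring context afterwards
    }
}
```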
And, last but not least: what do we (or better: I) learn from this?

- My usual approach of "first think about what is happening; if you cannot figure out what leads to the problem, immediately attach the debugger" was obviously not the best approach in this case (although starting to debug immediately has turned out to be a very efficient way to hunt down bugs for me in general).
- Code is not the only place where bugs can be found. A big problem is also understanding the basic APIs you use.
- Even if you use high-level APIs (i18n in this case, with about 5 delegates before the `ResourceBundle` was actually reached) you still need to know the very basics. **Reading APIs** earlier (ok, the whole hunt only took about 1-2 hours, but still) or even, in a perfect world, knowing all the APIs you use directly or indirectly would have saved some time.