diff --git a/404.html b/404.html index 4df39d8..aabdcf7 100644 --- a/404.html +++ b/404.html @@ -12,8 +12,8 @@ - - + + @@ -116,8 +116,13 @@
  • - - Tags + + Archives + +
  • +
  • + + Search
  • diff --git a/archives/index.html b/archives/index.html new file mode 100644 index 0000000..5011d7b --- /dev/null +++ b/archives/index.html @@ -0,0 +1,349 @@ + + + + + + + +Archives | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + + +
    +

    2024  1 +

    +
    +

    February  1

    +
    +
    +

    Resize Ubuntu VM Disk in Proxmox +

    +
    February 6, 2024 · 1 min · 53 words · Mafyuh
    + +
    +
    +
    +
    +
    +

    2023  6 +

    +
    +

    November  2

    +
    +
    +

    Proton Mail - SimpleLogin authentik Social Login Setup +

    +
    November 12, 2023 · 2 min · 227 words · Mafyuh
    + +
    +
    +

    How To Automate Jellyfin Issue Handling +

    +
    November 11, 2023 · 19 min · 3952 words · Mafyuh
    + +
    +
    +
    +
    +

    October  2

    +
    +
    +

    How to authenticate Guacamole via authentik with Cloudflare and Nginx Proxy Manager +

    +
    October 29, 2023 · 2 min · 344 words · Mafyuh
    + +
    +
    +

    How to authenticate Zammad via SAML with Nginx Proxy Manager +

    +
    October 29, 2023 · 1 min · 107 words · Mafyuh
    + +
    +
    +
    +
    +

    September  1

    +
    +
    +

    How To Authenticate KASM via authentik +

    +
    September 30, 2023 · 2 min · 393 words · Mafyuh
    + +
    +
    +
    +
    +

    August  1

    +
    +
    +

    Software +

    +
    August 26, 2023 · 1 min · 124 words · Mafyuh
    + +
    +
    +
    +
    +
    + + + + + + + + + + + + + + diff --git a/assets/js/search.08e05469bdb256c7ac07101becb0f8f73deb8a2e7fc656044896538a7f8023e1.js b/assets/js/search.08e05469bdb256c7ac07101becb0f8f73deb8a2e7fc656044896538a7f8023e1.js new file mode 100644 index 0000000..0430aa8 --- /dev/null +++ b/assets/js/search.08e05469bdb256c7ac07101becb0f8f73deb8a2e7fc656044896538a7f8023e1.js @@ -0,0 +1,19 @@ +/** + * Fuse.js v7.0.0 - Lightweight fuzzy-search (http://fusejs.io) + * + * Copyright (c) 2023 Kiro Risk (http://kiro.me) + * All Rights Reserved. Apache Software License 2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +var e,t;e=this,t=function(){"use strict";function e(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function t(t){for(var n=1;ne.length)&&(t=e.length);for(var n=0,r=new Array(t);n0&&void 0!==arguments[0]?arguments[0]:{},n=t.getFn,i=void 0===n?O.getFn:n,o=t.fieldNormWeight,a=void 0===o?O.fieldNormWeight:o;r(this,e),this.norm=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1,t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:3,n=new Map,r=Math.pow(10,t);return{get:function(t){var i=t.match(j).length;if(n.has(i))return n.get(i);var o=1/Math.pow(i,.5*e),a=parseFloat(Math.round(o*r)/r);return n.set(i,a),a},clear:function(){n.clear()}}}(a,3),this.getFn=i,this.isCreated=!1,this.setIndexRecords()}return o(e,[{key:"setSources",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];this.docs=e}},{key:"setIndexRecords",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];this.records=e}},{key:"setKeys",value:function(){var e=this,t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];this.keys=t,this._keysMap={},t.forEach((function(t,n){e._keysMap[t.id]=n}))}},{key:"create",value:function(){var e=this;!this.isCreated&&this.docs.length&&(this.isCreated=!0,f(this.docs[0])?this.docs.forEach((function(t,n){e._addString(t,n)})):this.docs.forEach((function(t,n){e._addObject(t,n)})),this.norm.clear())}},{key:"add",value:function(e){var t=this.size();f(e)?this._addString(e,t):this._addObject(e,t)}},{key:"removeAt",value:function(e){this.records.splice(e,1);for(var t=e,n=this.size();t2&&void 0!==arguments[2]?arguments[2]:{},r=n.getFn,i=void 0===r?O.getFn:r,o=n.fieldNormWeight,a=void 0===o?O.fieldNormWeight:o,c=new A({getFn:i,fieldNormWeight:a});return c.setKeys(e.map(x)),c.setSources(t),c.create(),c}function I(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=t.errors,r=void 0===n?0:n,i=t.currentLocation,o=void 0===i?0:i,a=t.expectedLocation,c=void 0===a?0:a,s=t.distance,h=void 0===s?O.distance:s,u=t.ignoreLocation,l=void 0===u?O.ignoreLocation:u,d=r/e.length;if(l)return d;var f=Math.abs(c-o);return h?d+f/h:f?1:d}var F=32;function C(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=r.location,o=void 0===i?O.location:i,a=r.distance,c=void 0===a?O.distance:a,s=r.threshold,h=void 0===s?O.threshold:s,u=r.findAllMatches,l=void 0===u?O.findAllMatches:u,d=r.minMatchCharLength,f=void 0===d?O.minMatchCharLength:d,v=r.includeMatches,g=void 0===v?O.includeMatches:v,y=r.ignoreLocation,p=void 0===y?O.ignoreLocation:y;if(t.length>F)throw new Error("Pattern length exceeds max of ".concat(F,"."));for(var m,b=t.length,k=e.length,M=Math.max(0,Math.min(o,k)),w=h,x=M,L=f>1||g,S=L?Array(k):[];(m=e.indexOf(t,x))>-1;){var 
_=I(t,{currentLocation:m,expectedLocation:M,distance:c,ignoreLocation:p});if(w=Math.min(_,w),x=m+b,L)for(var j=0;j=$;z-=1){var J=z-1,R=n[e.charAt(J)];if(L&&(S[J]=+!!R),K[z]=(K[z+1]<<1|1)&R,P&&(K[z]|=(A[z+1]|A[z])<<1|1|A[z+1]),K[z]&N&&(E=I(t,{errors:P,currentLocation:J,expectedLocation:M,distance:c,ignoreLocation:p}))<=w){if(w=E,(x=J)<=M)break;$=Math.max(1,2*M-x)}}if(I(t,{errors:P+1,currentLocation:M,expectedLocation:M,distance:c,ignoreLocation:p})>w)break;A=K}var U={isMatch:x>=0,score:Math.max(.001,E)};if(L){var B=function(){for(var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:O.minMatchCharLength,n=[],r=-1,i=-1,o=0,a=e.length;o=t&&n.push([r,i]),r=-1)}return e[o-1]&&o-r>=t&&n.push([r,o-1]),n}(S,f);B.length?g&&(U.indices=B):U.isMatch=!1}return U}function N(e){for(var t={},n=0,r=e.length;n1&&void 0!==arguments[1]?arguments[1]:{},o=i.location,a=void 0===o?O.location:o,c=i.threshold,s=void 0===c?O.threshold:c,h=i.distance,u=void 0===h?O.distance:h,l=i.includeMatches,d=void 0===l?O.includeMatches:l,f=i.findAllMatches,v=void 0===f?O.findAllMatches:f,g=i.minMatchCharLength,y=void 0===g?O.minMatchCharLength:g,p=i.isCaseSensitive,m=void 0===p?O.isCaseSensitive:p,b=i.ignoreLocation,k=void 0===b?O.ignoreLocation:b;if(r(this,e),this.options={location:a,threshold:s,distance:u,includeMatches:d,findAllMatches:v,minMatchCharLength:y,isCaseSensitive:m,ignoreLocation:k},this.pattern=m?t:t.toLowerCase(),this.chunks=[],this.pattern.length){var M=function(e,t){n.chunks.push({pattern:e,alphabet:N(e),startIndex:t})},w=this.pattern.length;if(w>F){for(var x=0,L=w%F,S=w-L;x-1&&(n.refIndex=e.idx),t.matches.push(n)}}))}function D(e,t){t.score=e.score}var K=function(){function e(n){var i=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},o=arguments.length>2?arguments[2]:void 0;if(r(this,e),this.options=t(t({},O),i),this.options.useExtendedSearch)throw new Error("Extended search is not available");this._keyStore=new w(this.options.keys),this.setCollection(n,o)}return o(e,[{key:"setCollection",value:function(e,t){if(this._docs=e,t&&!(t instanceof A))throw new Error("Incorrect 'index' type");this._myIndex=t||E(this.options.keys,this._docs,{getFn:this.options.getFn,fieldNormWeight:this.options.fieldNormWeight})}},{key:"add",value:function(e){y(e)&&(this._docs.push(e),this._myIndex.add(e))}},{key:"remove",value:function(){for(var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:function(){return!1},t=[],n=0,r=this._docs.length;n1&&void 0!==arguments[1]?arguments[1]:{}).limit,n=void 0===t?-1:t,r=this.options,i=r.includeMatches,o=r.includeScore,a=r.shouldSort,c=r.sortFn,s=r.ignoreFieldNorm,h=f(e)?f(this._docs[0])?this._searchStringList(e):this._searchObjectList(e):this._searchLogical(e);return function(e,t){var n=t.ignoreFieldNorm,r=void 0===n?O.ignoreFieldNorm:n;e.forEach((function(e){var t=1;e.matches.forEach((function(e){var n=e.key,i=e.norm,o=e.score,a=n?n.weight:null;t*=Math.pow(0===o&&a?Number.EPSILON:o,(a||1)*(r?1:i))})),e.score=t}))}(h,{ignoreFieldNorm:s}),a&&h.sort(c),v(n)&&n>-1&&(h=h.slice(0,n)),function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.includeMatches,i=void 0===r?O.includeMatches:r,o=n.includeScore,a=void 0===o?O.includeScore:o,c=[];return i&&c.push($),a&&c.push(D),e.map((function(e){var n=e.idx,r={item:t[n],refIndex:n};return c.length&&c.forEach((function(t){t(e,r)})),r}))}(h,this._docs,{includeMatches:i,includeScore:o})}},{key:"_searchStringList",value:function(e){var 
t=T(e,this.options),n=this._myIndex.records,r=[];return n.forEach((function(e){var n=e.v,i=e.i,o=e.n;if(y(n)){var a=t.searchIn(n),c=a.isMatch,s=a.score,h=a.indices;c&&r.push({item:n,idx:i,matches:[{score:s,value:n,norm:o,indices:h}]})}})),r}},{key:"_searchLogical",value:function(e){throw new Error("Logical search is not available")}},{key:"_searchObjectList",value:function(e){var t=this,n=T(e,this.options),r=this._myIndex,i=r.keys,o=r.records,a=[];return o.forEach((function(e){var r=e.$,o=e.i;if(y(r)){var s=[];i.forEach((function(e,i){s.push.apply(s,c(t._findMatches({key:e,value:r[i],searcher:n})))})),s.length&&a.push({idx:o,item:r,matches:s})}})),a}},{key:"_findMatches",value:function(e){var t=e.key,n=e.value,r=e.searcher;if(!y(n))return[];var i=[];if(u(n))n.forEach((function(e){var n=e.v,o=e.i,a=e.n;if(y(n)){var c=r.searchIn(n),s=c.isMatch,h=c.score,u=c.indices;s&&i.push({score:h,key:t,value:n,idx:o,norm:a,indices:u})}}));else{var o=n.v,a=n.n,c=r.searchIn(o),s=c.isMatch,h=c.score,l=c.indices;s&&i.push({score:h,key:t,value:o,norm:a,indices:l})}return i}}]),e}();return K.version="7.0.0",K.createIndex=E,K.parseIndex=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=t.getFn,r=void 0===n?O.getFn:n,i=t.fieldNormWeight,o=void 0===i?O.fieldNormWeight:i,a=e.keys,c=e.records,s=new A({getFn:r,fieldNormWeight:o});return s.setKeys(a),s.setIndexRecords(c),s},K.config=O,K},"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).Fuse=t(); +; +/* + PaperMod v7 + License: MIT https://github.com/adityatelange/hugo-PaperMod/blob/master/LICENSE + Copyright (c) 2020 nanxiaobei and adityatelange + Copyright (c) 2021-2024 adityatelange +*/ + +; +(()=>{var s,r,c,e={distance:1e3,iscasesensitive:!1,keys:["title","permalink","summary","content"],limit:10,location:0,minmatchcharlength:0,shouldsort:!0,threshold:.4},n=document.getElementById("searchResults"),t=document.getElementById("searchInput"),a=null,o=!1;window.onload=function(){let t=new XMLHttpRequest;t.onreadystatechange=function(){if(t.readyState===4)if(t.status===200){{let n=JSON.parse(t.responseText);if(n){let t={distance:100,threshold:.4,ignoreLocation:!0,keys:["title","permalink","summary","content"]};e&&(t={isCaseSensitive:e.iscasesensitive??!1,includeScore:e.includescore??!1,includeMatches:e.includematches??!1,minMatchCharLength:e.minmatchcharlength??1,shouldSort:e.shouldsort??!0,findAllMatches:e.findallmatches??!1,keys:e.keys??["title","permalink","summary","content"],location:e.location??0,threshold:e.threshold??.4,distance:e.distance??100,ignoreLocation:e.ignorelocation??!0}),s=new Fuse(n,t)}}}else console.log(t.responseText)},t.open("GET","../index.json"),t.send()};function i(e){document.querySelectorAll(".focus").forEach(function(e){e.classList.remove("focus")}),e?(e.focus(),document.activeElement=a=e,e.parentElement.classList.add("focus")):document.activeElement.parentElement.classList.add("focus")}function l(){o=!1,n.innerHTML=t.value="",t.focus()}t.onkeyup=function(){if(s){let t;if(e?t=s.search(this.value.trim(),{limit:e.limit}):t=s.search(this.value.trim()),t.length!==0){let e="";for(let n in t)e+=`
  • ${t[n].item.title} \xBB
  • `;n.innerHTML=e,o=!0,r=n.firstChild,c=n.lastChild}else o=!1,n.innerHTML=""}},t.addEventListener("search",function(){this.value||l()}),document.onkeydown=function(e){let d=e.key,s=document.activeElement,u=document.getElementById("searchbox").contains(s);if(s===t){let e=document.getElementsByClassName("focus");for(;e.length>0;)e[0].classList.remove("focus")}else a&&(s=a);if(d==="Escape")l();else if(!o||!u)return;else d==="ArrowDown"?(e.preventDefault(),s==t?i(n.firstChild.lastChild):s.parentElement!=c&&i(s.parentElement.nextSibling.lastChild)):d==="ArrowUp"?(e.preventDefault(),s.parentElement==r?i(t):s!=t&&i(s.parentElement.previousSibling.lastChild)):d==="ArrowRight"&&s.click()}})() \ No newline at end of file diff --git a/categories/import-2023-08-26-0406/index.html b/categories/import-2023-08-26-0406/index.html index 8b00e6e..ce40dd0 100644 --- a/categories/import-2023-08-26-0406/index.html +++ b/categories/import-2023-08-26-0406/index.html @@ -12,8 +12,8 @@ - - + + @@ -117,8 +117,13 @@
  • - - Tags + + Archives + +
  • +
  • + + Search
  • @@ -151,7 +156,7 @@ Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Nginx Proxy Manager Nginx Apache2 Traefik Authentik Portainer Yacht AdGuardHome Pihole Wazuh Zabbix Uptime Kuma Ghost (this blog) Wordpress Hydroxide (proton mail bridge) Calibre Smokeping Openspeedtest Grafana Prometheus InfluxDB PostgresSQL MySQL Watchtower Apache Guacamole Ansible Terraform Packer Vaultwarden Kasm Workspaces Jellyfin Plex Twingate Tailscale Headscale Wireguard LinkStack N8N Gotify Nextcloud Immich AI...

    August 26, 2023 · 1 min · 124 words · Mafyuh
    - + diff --git a/categories/import-2023-08-26-0406/index.xml b/categories/import-2023-08-26-0406/index.xml index 34b6c70..4650f3e 100644 --- a/categories/import-2023-08-26-0406/index.xml +++ b/categories/import-2023-08-26-0406/index.xml @@ -10,9 +10,9 @@ Software - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Sat, 26 Aug 2023 00:13:40 +0000 - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Just a straight forward list of pretty much everything that makes up my homelab. Or systems I&rsquo;ve ran in the past. Operating Systems Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Ubuntu doesnt play nice) Debian 11 Proxmox 8 Windows 10/11 TrueNAS Scale (virtualized) CasaOS (zimaboard) pfSense Applications/Containers diff --git a/categories/import-2023-09-08-0216/index.html b/categories/import-2023-09-08-0216/index.html index f31a8f8..c55039f 100644 --- a/categories/import-2023-09-08-0216/index.html +++ b/categories/import-2023-09-08-0216/index.html @@ -12,8 +12,8 @@ - - + + @@ -117,8 +117,13 @@
  • - - Tags + + Archives + +
  • +
  • + + Search
  • @@ -151,7 +156,7 @@ Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Nginx Proxy Manager Nginx Apache2 Traefik Authentik Portainer Yacht AdGuardHome Pihole Wazuh Zabbix Uptime Kuma Ghost (this blog) Wordpress Hydroxide (proton mail bridge) Calibre Smokeping Openspeedtest Grafana Prometheus InfluxDB PostgresSQL MySQL Watchtower Apache Guacamole Ansible Terraform Packer Vaultwarden Kasm Workspaces Jellyfin Plex Twingate Tailscale Headscale Wireguard LinkStack N8N Gotify Nextcloud Immich AI...

    August 26, 2023 · 1 min · 124 words · Mafyuh
    - + diff --git a/categories/import-2023-09-08-0216/index.xml b/categories/import-2023-09-08-0216/index.xml index 10aa47d..b1143ee 100644 --- a/categories/import-2023-09-08-0216/index.xml +++ b/categories/import-2023-09-08-0216/index.xml @@ -10,9 +10,9 @@ Software - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Sat, 26 Aug 2023 00:13:40 +0000 - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Just a straight forward list of pretty much everything that makes up my homelab. Or systems I&rsquo;ve ran in the past. Operating Systems Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Ubuntu doesnt play nice) Debian 11 Proxmox 8 Windows 10/11 TrueNAS Scale (virtualized) CasaOS (zimaboard) pfSense Applications/Containers diff --git a/categories/index.html b/categories/index.html index 4591b57..a383d1d 100644 --- a/categories/index.html +++ b/categories/index.html @@ -12,8 +12,8 @@ - - + + @@ -117,8 +117,13 @@
  • - - Tags + + Archives + +
  • +
  • + + Search
  • diff --git a/index.html b/index.html index 33741f1..221cf9a 100644 --- a/index.html +++ b/index.html @@ -13,8 +13,8 @@ - - + + @@ -131,8 +131,13 @@
  • - - Tags + + Archives + +
  • +
  • + + Search
  • diff --git a/index.xml b/index.xml index 22297f4..0124156 100644 --- a/index.xml +++ b/index.xml @@ -10,25 +10,25 @@ Resize Ubuntu VM Disk in Proxmox - https://mafyuh.github.io/post/resize-ubuntu-vm-disk/ + https://mafyuh.github.io/posts/resize-ubuntu-vm-disk/ Tue, 06 Feb 2024 02:58:07 +0000 - https://mafyuh.github.io/post/resize-ubuntu-vm-disk/ + https://mafyuh.github.io/posts/resize-ubuntu-vm-disk/ 1st step: Increase/resize disk from GUI console 2nd step: Extend physical drive partition and check free space with: sudo growpart /dev/sda 3 sudo pvdisplay sudo pvresize /dev/sda3 sudo pvdisplay 3rd step: Extend Logical volume sudo lvdisplay sudo lvextend -l +100%FREE /dev/ubuntu-vg/ubuntu-lv sudo lvdisplay 4th step: Resize Filesystem sudo resize2fs /dev/ubuntu-vg/ubuntu-lv sudo fdisk -l Proton Mail - SimpleLogin authentik Social Login Setup - https://mafyuh.github.io/post/proton-mail-authentik-social-login-setup/ + https://mafyuh.github.io/posts/proton-mail-authentik-social-login-setup/ Sun, 12 Nov 2023 16:20:00 +0000 - https://mafyuh.github.io/post/proton-mail-authentik-social-login-setup/ + https://mafyuh.github.io/posts/proton-mail-authentik-social-login-setup/ This is just a quick guide on how to authenticate your authentik users with Proton using SimpleLogin OIDC. To accomplish this, first create a SimpleLogin acct by logging in with Proton. Once thats done go to https://app.simplelogin.io/developer and create a website. Give it your authentik URL. Then go to Oauth Settings and copy your client ID and secret for next step. add your authentik URL in redirect URL like this https://auth. How To Automate Jellyfin Issue Handling - https://mafyuh.github.io/post/how-to-automate-jellyfin-issue-handling/ + https://mafyuh.github.io/posts/how-to-automate-jellyfin-issue-handling/ Sat, 11 Nov 2023 16:20:00 +0000 - https://mafyuh.github.io/post/how-to-automate-jellyfin-issue-handling/ + https://mafyuh.github.io/posts/how-to-automate-jellyfin-issue-handling/ I wanted a way to automate when users tell me a video on my Jellyfin server has an issue. After alot of trial and error, ChatGPT, Bard and I came up with this automation. Requirements My only requirements when making this was that it was free and self-hostable. Not even any NPM extensions are required in AP. Actual Software requirements are: Sonarr Radarr Overseerr/Jellyseerr Optional @@ -36,35 +36,35 @@ SMTP server or ability to send SMTP messages (can also use discord) ActivePieces How to authenticate Guacamole via authentik with Cloudflare and Nginx Proxy Manager - https://mafyuh.github.io/post/how-to-authenticate-guacamole-authentik-nginxproxymanager/ + https://mafyuh.github.io/posts/how-to-authenticate-guacamole-authentik-nginxproxymanager/ Sun, 29 Oct 2023 16:20:00 +0000 - https://mafyuh.github.io/post/how-to-authenticate-guacamole-authentik-nginxproxymanager/ + https://mafyuh.github.io/posts/how-to-authenticate-guacamole-authentik-nginxproxymanager/ authentik&rsquo;s docs have a guide already for Guacamole. You can find that here. Follow all the instructions there, (especially the part where you create a user in Guacamole with the USERNAME of your email. not just filling in the email), but if you are using Cloudflare as our DNS you may run into problems. Such as infinite redirect loop. Error 403 Forbidden While it was looping, I checked my Guacamole docker container logs in Portainer, and found the 403 Forbidden error. 
How to authenticate Zammad via SAML with Nginx Proxy Manager - https://mafyuh.github.io/post/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/ + https://mafyuh.github.io/posts/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/ Sun, 29 Oct 2023 16:20:00 +0000 - https://mafyuh.github.io/post/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/ + https://mafyuh.github.io/posts/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/ If you are getting error messages like: 422: the change you wanted was rejected. message from saml: actioncontroller::invalidauthenticitytoken Just make sure you set these in your Nginx Proxy Manager hosts Advanced field: location / { proxy_pass http://zammad:8080; # Replace proxy_set_header Host $host; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Ssl on; proxy_set_header X-Forwarded-Port $server_port; proxy_set_header X-Forwarded-Host $host; } I spent way too long trying to figure this out, reading through Github issues, breaking my SAML provider and Zammad configs, starting over, when the whole time it was just good old nginx header issues. How To Authenticate KASM via authentik - https://mafyuh.github.io/post/how-to-authenticate-kasm-via-authentik/ + https://mafyuh.github.io/posts/how-to-authenticate-kasm-via-authentik/ Sat, 30 Sep 2023 16:20:00 +0000 - https://mafyuh.github.io/post/how-to-authenticate-kasm-via-authentik/ + https://mafyuh.github.io/posts/how-to-authenticate-kasm-via-authentik/ You could do this with OpenID as well but this method is using SAML. This guide assumes you already have running instances of Kasm Workspaces and authentik. The official authentik docs dont have a Kasm Integration listed at the time. So I thought I would help out anyone who is trying to integrate these services via SAML. authentik&rsquo;s SAML docs can be found here. Setting up Kasm In the Kasm Workspaces admin, click Access Management - Authentication - SAML and create a new configuration. Software - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Sat, 26 Aug 2023 00:13:40 +0000 - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Just a straight forward list of pretty much everything that makes up my homelab. Or systems I&rsquo;ve ran in the past. Operating Systems Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Ubuntu doesnt play nice) Debian 11 Proxmox 8 Windows 10/11 TrueNAS Scale (virtualized) CasaOS (zimaboard) pfSense Applications/Containers diff --git a/post/how-to-authenticate-guacamole-authentik-nginxproxymanager/index.html b/post/how-to-authenticate-guacamole-authentik-nginxproxymanager/index.html index bdcdae9..c8fbfec 100644 --- a/post/how-to-authenticate-guacamole-authentik-nginxproxymanager/index.html +++ b/post/how-to-authenticate-guacamole-authentik-nginxproxymanager/index.html @@ -1,7 +1,7 @@ - + @@ -10,16 +10,16 @@ - + - - - - - + + + + + - + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + +
    +
    + +

    + +

    + +
    + + +
    + + +
    +
    +
    + + + + + + + + + + + + + + + diff --git a/posts/how-to-authenticate-guacamole-authentik-nginxproxymanager/index.html b/posts/how-to-authenticate-guacamole-authentik-nginxproxymanager/index.html new file mode 100644 index 0000000..e97fb8d --- /dev/null +++ b/posts/how-to-authenticate-guacamole-authentik-nginxproxymanager/index.html @@ -0,0 +1,377 @@ + + + + + + + +How to authenticate Guacamole via authentik with Cloudflare and Nginx Proxy Manager | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + +
    +
    + +

    + How to authenticate Guacamole via authentik with Cloudflare and Nginx Proxy Manager +

    + +
    +
    + + Table of Contents + + + +
    +
    + +

authentik’s docs already have a guide for Guacamole. You can find that here. Follow all the instructions there (especially the part where you create a user in Guacamole with the USERNAME set to your email, not just filling in the email field), but if you are using Cloudflare as your DNS you may run into problems, such as an infinite redirect loop.

    +

    Error 403 Forbidden

    +

    While it was looping, I checked my Guacamole docker container logs in Portainer, and found the 403 Forbidden error.

    +
    22:03:59.418 [http-nio-8080-exec-2] INFO o.a.g.a.o.t.TokenValidationService - Rejected invalid OpenID token: JWT processing failed. Additional details: [[17] Unable to process JOSE object (cause: org.jose4j.lang.UnresolvableKeyException: Unable to find a suitable verification key for JWS w/ header {"alg":"RS256","kid":"xxx","typ":"JWT"} due to an unexpected exception (java.io.IOException: Non 200 status code (403 Forbidden) returned from https://example.com/application/o/guacamole/jwks/?exclude_x5) while obtaining or using keys from JWKS endpoint at https://example.com/application/o/guacamole/jwks/?exclude_x5): JsonWebSignature{"alg":"RS256","kid":"xxx","typ":"JWT"}
    +

I assumed it had something to do with my Nginx Proxy Manager and the way I was proxying Guacamole. I do have WebSocket support and Block Common Exploits enabled, and their docs give an nginx config that I added under Advanced.

    +
    location /guacamole/ {
    +    proxy_pass http://HOSTNAME:8080;
    +    proxy_buffering off;
    +    proxy_http_version 1.1;
    +    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    +    proxy_set_header Upgrade $http_upgrade;
    +    proxy_set_header Connection $http_connection;
    +    access_log off;
    +}
    +

I messed around with settings individually for hours, reading their docs. I tried oznu’s Guacamole image as well, this time getting errors about the Postgres version being incompatible. I figured it could be something with Cloudflare, so I turned down my HTTPS settings. Nada. I tried SAML; more errors. I finally found this GitHub issue, and thanks to Fma965 for finding the solution.

    +

Go to your Cloudflare Dashboard. Click on your domain's summary, then find Rules in the left-hand tab.

    +

Under Page Rules, create a new Page Rule and set the URL to your jwks URL from authentik’s provider summary. Under Pick a Setting, choose Browser Integrity Check and make sure it's unchecked. Save.

    +

    Page Rules Images +

    +

This finally got me authenticated into my Guacamole instance via authentik. I spent way too much time on this integration. Anyway, I hope this guide helps someone who may be in my shoes.

    + + +
    + + +
    +
    + + + + + + + + + + + + + + + diff --git a/posts/how-to-authenticate-kasm-via-authentik/index.html b/posts/how-to-authenticate-kasm-via-authentik/index.html new file mode 100644 index 0000000..64e9e39 --- /dev/null +++ b/posts/how-to-authenticate-kasm-via-authentik/index.html @@ -0,0 +1,396 @@ + + + + + + + +How To Authenticate KASM via authentik | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + +
    +
    + +

    + How To Authenticate KASM via authentik +

    + +
    +
    + + Table of Contents + + + +
    +
    + +

You could do this with OpenID as well, but this method uses SAML. This guide assumes you already have running instances of Kasm Workspaces and authentik.

    +

The official authentik docs don't have a Kasm integration listed at the time of writing, so I thought I would help out anyone who is trying to integrate these services via SAML. authentik’s SAML docs can be found here.

    +

    Setting up Kasm

    +

In the Kasm Workspaces admin, click Access Management - Authentication - SAML and create a new configuration. Make sure you enable it and make it the default after testing. You will probably find yourself switching between tabs a lot; it's best to start creating both configurations at the same time, as you need links from each.

    +

    KASM SAML Config +

    + +

    authentik Config +

    +

    Setting up authentik

    +

In the authentik admin, under Applications, create a new SAML provider. Once you have created the provider, create an Application and set its provider to the newly created kasm provider. For simplicity's sake, the provider and application are both named kasm (kasms pictured).

    +

    authentik Config +

    + +

Make sure you save your changes. You should now be able to test SAML at the bottom of the page. Once tested, I recommend opening an incognito tab and loading your KASM website.

    +

    authentik Config +

    +

You should now be able to authenticate yourself using SAML via authentik. I recommend going back to your admin session and adding your newly created user to the admin group. Also, if you should be the only one accessing this via authentik, I would change the kasm Application in authentik and bind it to your user.

    +

Thank you to u/agent-squirrel and this subreddit post for helping me with the NameID Attribute part!

    + + +
    + + +
    +
    + + + + + + + + + + + + + + + diff --git a/posts/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/index.html b/posts/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/index.html new file mode 100644 index 0000000..f657620 --- /dev/null +++ b/posts/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/index.html @@ -0,0 +1,362 @@ + + + + + + + +How to authenticate Zammad via SAML with Nginx Proxy Manager | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + +
    +
    + +

    + How to authenticate Zammad via SAML with Nginx Proxy Manager +

    + +
    + +

    If you are getting error messages like:

    +
    422: the change you wanted was rejected. message from saml: actioncontroller::invalidauthenticitytoken
    +

Just make sure you set these in your Nginx Proxy Manager host's Advanced field:

    +
    location / {
    +  proxy_pass http://zammad:8080; # Replace
    +  proxy_set_header  Host $host;
    +  proxy_set_header  X-Forwarded-For $proxy_add_x_forwarded_for;
    +  proxy_set_header  X-Forwarded-Proto $scheme;
    +  proxy_set_header  X-Forwarded-Ssl on;
    +  proxy_set_header  X-Forwarded-Port $server_port;
    +  proxy_set_header  X-Forwarded-Host $host;
    +}
    +

I spent way too long trying to figure this out, reading through GitHub issues, breaking my SAML provider and Zammad configs, and starting over, when the whole time it was just good old nginx header issues.

    +

I hope this helps someone out. The fix was found on this Rails GitHub issue.

    +

    (https://github.com/rails/rails/issues/22965)

    + + +
    + + +
    +
    + + + + + + + + + + + + + + + diff --git a/posts/how-to-automate-jellyfin-issue-handling/index.html b/posts/how-to-automate-jellyfin-issue-handling/index.html new file mode 100644 index 0000000..4b37068 --- /dev/null +++ b/posts/how-to-automate-jellyfin-issue-handling/index.html @@ -0,0 +1,1260 @@ + + + + + + + +How To Automate Jellyfin Issue Handling | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + +
    +
    + +

    + How To Automate Jellyfin Issue Handling +

    + +
    + +

I wanted a way to automate when users tell me a video on my Jellyfin server has an issue. After a lot of trial and error, ChatGPT, Bard, and I came up with this automation.

    +

    Requirements

    +

My only requirements when making this were that it be free and self-hostable. Not even any NPM extensions are required in AP. +The actual software requirements are:

    +
      +
    1. Sonarr
    2. +
    3. Radarr
    4. +
    5. Overseerr/Jellyseerr
    6. +
    +

    Optional

    +
      +
    1. SMTP server or ability to send SMTP messages (can also use discord)
    2. +
    3. ActivePieces or any other automation platform that supports TS. (Zapier, n8n, etc)
    4. +
    +

    Here’s a great AP setup and how-to video:

    + +
    + +
    + +

Note: I didn't do any of the ngrok stuff. I just have Nginx Proxy Manager set up with a wildcard certificate. Then just give it a domain name and point it at its ip:8080. No special Nginx config is needed. Make sure you set AP_FRONTEND_URL in .env.
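For reference, here is a minimal sketch of that .env line, with ap.example.com standing in for whatever domain you point at AP through NPM:

    # Placeholder hostname; use the domain you proxy to ip:8080 via NPM
    AP_FRONTEND_URL=https://ap.example.com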

    +

This blog post is rather long; if you prefer to see the code on Git, you can find all of it here.

    +

    How it Works

    +

Whenever a user reports an issue in Jellyseerr, a webhook is sent to ActivePieces (AP) with the issue data. This triggers the automation to mark the download as failed, delete the file, re-search, refresh the Jellyfin libraries, and resolve the original issue with a comment. There is an optional step to approve or deny the automation.

    +

It works across Radarr and Sonarr, as the reported issue can be either a movie or a TV show.

    +

The only caveat is that if the issue covers an entire season, we just mark the issue as resolved and leave a comment saying to submit an issue for each episode individually.

    +

This works on my Jellyfin, Jellyseerr, Radarr, and Sonarr setup. I don't use Plex, but all you would have to change is the Jellyfin Refresh Library request to match Plex's equivalent.

    +

    Here is a pic of the full automation.

    +

    Automation Image +

    +

Everything of value is logged to the console, so check there for errors. Let's start breaking it down.

    +

#1 Jellyseerr Issue Reported

    +

The first thing is to create a flow in AP, select a trigger, and search for webhook. This will give you the webhook URL for Jellyseerr. +Next, in Jellyseerr, under Settings - Users - Default Permissions, make sure Report Issues is checked and save changes. +Then, under Settings - Notifications - Webhook, create a webhook notification with the URL from AP, enabling just Issue Reported and Issue Reopened. +This should look as follows (don't worry about my payload showing mediaId, this has since been deleted)

    +

    Jellyseer Image +

    +

    Here is my full JSON payload just in case:

    +
    {
    +    "notification_type": "{{notification_type}}",
    +    "event": "{{event}}",
    +    "subject": "{{subject}}",
    +    "message": "{{message}}",
    +    "image": "{{image}}",
    +    "{{media}}": {
    +        "media_type": "{{media_type}}",
    +        "tmdbId": "{{media_tmdbid}}",
    +        "tvdbId": "{{media_tvdbid}}",
    +        "status": "{{media_status}}",
    +        "status4k": "{{media_status4k}}"
    +    },
    +    "{{request}}": {
    +        "request_id": "{{request_id}}",
    +        "requestedBy_email": "{{requestedBy_email}}",
    +        "requestedBy_username": "{{requestedBy_username}}",
    +        "requestedBy_avatar": "{{requestedBy_avatar}}",
    +        "requestedBy_settings_discordId": "{{requestedBy_settings_discordId}}",
    +        "requestedBy_settings_telegramChatId": "{{requestedBy_settings_telegramChatId}}"
    +    },
    +    "{{issue}}": {
    +        "issue_id": "{{issue_id}}",
    +        "issue_type": "{{issue_type}}",
    +        "issue_status": "{{issue_status}}",
    +        "reportedBy_email": "{{reportedBy_email}}",
    +        "reportedBy_username": "{{reportedBy_username}}",
    +        "reportedBy_avatar": "{{reportedBy_avatar}}",
    +        "reportedBy_settings_discordId": "{{reportedBy_settings_discordId}}",
    +        "reportedBy_settings_telegramChatId": "{{reportedBy_settings_telegramChatId}}"
    +    },
    +    "{{comment}}": {
    +        "comment_message": "{{comment_message}}",
    +        "commentedBy_email": "{{commentedBy_email}}",
    +        "commentedBy_username": "{{commentedBy_username}}",
    +        "commentedBy_avatar": "{{commentedBy_avatar}}",
    +        "commentedBy_settings_discordId": "{{commentedBy_settings_discordId}}",
    +        "commentedBy_settings_telegramChatId": "{{commentedBy_settings_telegramChatId}}"
    +    },
    +    "{{extra}}": []
    +}
    +

You should now be able to report an issue on a random movie in Jellyseerr, then go to the webhook trigger and choose Generate sample data, and you should see the data from the request. I recommend doing this and creating an issue for an example movie, a TV series (All Seasons), and a TV series (1 Season).

    + +

#2 Create Approval Links

    +

    In AP, add the next step, search for Approval, and create approval links.

    +

    (Optional) #3 Send Email

    +

This is so I can either approve or deny the file from being deleted. Maybe it's a client issue and I know for a fact my file is good and don't want it deleted. Thus the links are sent to me along with some data from the request, so I know what I am approving/denying.

    +

You can use the core SMTP feature, but it's limited to text. I wanted some more customizability, so I chose Resend (which supports HTML) and set up an account there with one of my extra domains.

    +

You don't have to use email; you can use Discord, SMS, any generic HTTP request, or whatever you want. I just use email since I pay for my domains and pay Proton Mail for email, so I might as well use them.

    +

I'm not going to get too far into this since I don't care much about it at the moment; customize your email to your liking, but I'll post my somewhat-working HTML body here. I literally just copied what Bard gave me, added in data from the response, tested it, and called it good enough (it glitches on my mobile too).

    +
    <!DOCTYPE html>
    +<html lang="en">
    +<head>
    +<meta charset="UTF-8">
    +<meta name="viewport" content="width=device-width, initial-scale=1.0">
    +<title>Jellyseerr Issue Reported</title>
    +<style>
    +body {
    +  font-family: sans-serif;
    +  margin: 0;
    +  padding: 0;
    +  background-color: #222;
    +  color: #fff;
    +}
    +.container {
    +  width: 80%;
    +  margin: 0 auto;
    +  padding: 20px;
    +  background-color: #333;
    +  border-radius: 10px;
    +  box-shadow: 0px 2px 5px rgba(0, 0, 0, 0.1);
    +}
    +.header {
    +  display: flex;
    +  justify-content: space-between;
    +  align-items: center;
    +  padding-bottom: 20px;
    +  border-bottom: 1px solid #555;
    +}
    +.header h1 {
    +  font-size: 24px;
    +  font-weight: bold;
    +  margin: 0;
    +  color: #fff;
    +}
    +.header img {
    +  width: 50px;
    +  height: 50px;
    +  border-radius: 50%;
    +  object-fit: cover;
    +}
    +.content {
    +  margin: 0 auto;
    +  text-align: center;
    +}
    +.issue-subject {
    +  font-size: 18px;
    +  font-weight: bold;
    +  margin-bottom: 10px;
    +  color: #fff;
    +}
    +.issue-message {
    +  font-size: 16px;
    +  line-height: 1.5;
    +  margin-bottom: 20px;
    +  color: #ccc;
    +}
    +.issue-image {
    +  width: 100%;
    +  height: auto;
    +  margin-bottom: 20px;
    +}
    +.buttons {
    +  display: flex;
    +  justify-content: space-between;
    +}
    +.button {
    +  background-color: #007bff;
    +  color: #fff;
    +  padding: 10px 20px;
    +  border-radius: 5px;
    +  cursor: pointer;
    +  text-decoration: none;
    +}
    +.button:hover {
    +  background-color: #0056b3;
    +}
    +.disapprove-button {
    +  background-color: #dc3545;
    +  color: #fff;
    +  padding: 10px 20px;
    +  border-radius: 5px;
    +  cursor: pointer;
    +  text-decoration: none;
    +}
    +.disapprove-button:hover {
    +  background-color: #bd2830;
    +}
    +</style>
    +</head>
    +<body>
    +<div class="container">
    +<div class="header">
    +<img src="https://your-logo-url" alt="Jellyseerr Logo">
    +<h1>Jellyseerr Issue Reported</h1>
    +</div>
    +<div class="content">
    +<div class="issue-subject">
    +Jellyseerr Issue Reported
    +</div>
    +<div class="issue-message">
    +This issue was submitted by 1. Jellyseerr Issue Reported body issue reportedBy_username.
    +<br>
    +The reason for the issue:1. Jellyseerr Issue Reported body message
    +<br>
    +Please review the issue and take appropriate action.
    +<br>
    +<img src="  1. Jellyseerr Issue Reported body image  ">
    +</div>
    +<div class="buttons">
    +<a href="2. Create Approval Links approvalLink  "><button class="button">Approve</button></a>
    +<a href="2. Create Approval Links disapprovalLink  "><button class="disapprove-button">Deny</button></a>
    +</div>
    +</div>
    +</div>
    +</body>
    +</html>
    +

    And here’s what an email looks like:

    +

    Email Look +

    +

    (Optional) #4 Wait for Approval

    +

This pauses the flow until I approve or deny.

    +

    #5 Radarr/Sonarr Branch

    +

As stated previously, I wanted this to work regardless of whether the item is a movie or a TV show. So, using the core Branch feature, we just say that if the media_type value from the issue contains the text movie, it's true.
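If it helps to see it as code, here is a rough sketch of what that Branch condition amounts to; the real check is configured in the AP Branch UI rather than a Code step, and inputs.issue is assumed to be the webhook body, just like in the Code steps below.

    export const code = async (inputs) => {
      // Jellyseerr sends media_type as "movie" or "tv" in the issue payload
      const mediaType = inputs.issue.media.media_type;
      const isMovie = typeof mediaType === 'string' && mediaType.includes('movie');
      console.log(`media_type: ${mediaType} -> taking the ${isMovie ? 'Radarr' : 'Sonarr'} path`);
      return isMovie;
    };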

    +

    Radarr AP Image +

    +

    #6 Radarr Mark As Failed

    +

All I do here is use the Code function with one input, which is the whole body message of the request; this is assigned to inputs.issue in the code (CASE SENSITIVE).

    +

    Radarr AP Image +

    +

Here is the code. Just replace the API key and base URL:

    +
    export const code = async (inputs) => {
    +  const issueSubject = inputs.issue.subject;
    +  const movieNameRegex = /(.*)\s\((\d{4})\)/;
    +  const match = movieNameRegex.exec(issueSubject);
    +
    +  if (match) {
    +    const movieName = match[1];
    +    const year = match[2];
    +    const tmdbId = inputs.issue.media.tmdbId;
    +
    +    console.log(`Movie name: ${movieName}`);
    +    console.log(`Year: ${year}`);
    +    console.log(`TMDB ID: ${tmdbId}`);
    +
    +    // Define your Radarr API key and base URL
    +    const radarrApiKey = 'your-api-key'; // Replace with your Radarr API key
    +    const radarrBaseUrl = 'https://radarr.example.com/api/v3/';
    +
    +    // Define a function to make API requests to Radarr
    +    const makeRadarrRequest = async (endpoint, method = 'GET') => {
    +      const apiUrl = radarrBaseUrl + endpoint;
    +      console.log(`Calling Radarr API: ${apiUrl}`);
    +
    +      const response = await fetch(apiUrl, {
    +        method,
    +        headers: {
    +          'X-Api-Key': radarrApiKey,
    +        },
    +      });
    +
    +      if (response.ok) {
    +        return await response.json();
    +      } else {
    +        console.error(`Radarr API request failed: ${response.statusText}`);
    +        return null;
    +      }
    +    };
    +
    +    // Use Radarr's API to look up the movie by TMDB ID
    +    const radarrApiResponseData = await makeRadarrRequest(`movie?tmdbId=${tmdbId}`);
    +
    +    if (radarrApiResponseData && radarrApiResponseData.length > 0) {
    +      const movieId = radarrApiResponseData[0].id; // Get the Radarr ID of the first movie
    +      console.log('Radarr Movie ID:', movieId);
    +
    +      // Use the Radarr movie ID to get the history of the movie
    +      const historyApiResponseData = await makeRadarrRequest(`history/movie?movieId=${movieId}`);
    +
    +      if (historyApiResponseData && historyApiResponseData.length > 0) {
    +        const historyId = historyApiResponseData[0].id; // Get the history ID
    +        console.log('History ID:', historyId);
    +
    +        // Use the history ID to mark the movie as failed
    +        const markFailedResponse = await makeRadarrRequest(`history/failed/${historyId}`, 'POST');
    +
    +        if (markFailedResponse) {
    +          console.log('Movie successfully marked as failed.');
    +        } else {
    +          console.error('Failed to mark movie as failed');
    +        }
    +      } else {
    +        console.error('No history found for movie ID:', movieId);
    +      }
    +    } else {
    +      console.error('No movies found for TMDB ID:', tmdbId);
    +    }
    +  }
    +};
    +

    #7 Delay 5 seconds

    +

This gives Radarr time to process.

    +

    #8 Delete Movie File

    +

I didn't want to delete the actual movie from Radarr, just the file itself; hence a lot of trial and error, but a working script. +All I do here is use the Code function with one input, which is the whole body message of the request; this is assigned to inputs.issue in the code.

    +
    export const code = async (inputs) => {
    +  const issueSubject = inputs.issue.subject;
    +  const movieNameRegex = /(.*)\s\((\d{4})\)/;
    +  const match = movieNameRegex.exec(issueSubject);
    +
    +  if (match) {
    +    const movieName = match[1];
    +    const year = match[2];
    +    const tmdbId = inputs.issue.media.tmdbId;
    +
    +    console.log(`Movie name: ${movieName}`);
    +    console.log(`Year: ${year}`);
    +    console.log(`TMDB ID: ${tmdbId}`);
    +
    +    // Define your Radarr API key
    +    const radarrApiKey = 'your-api-key'; // Replace with your Radarr API key
    +    const radarrBaseUrl = 'https://radarr.example.com/api/v3';
    +
    +    // Use Radarr's API to look up the movie by TMDB ID and get the Radarr ID
    +    const radarrApiUrl = `${radarrBaseUrl}/movie?tmdbId=${tmdbId}`;
    +    console.log('Calling Radarr API to look up the movie...');
    +
    +    const radarrApiResponse = await fetch(radarrApiUrl, {
    +      method: 'GET',
    +      headers: {
    +        'X-Api-Key': radarrApiKey,
    +      },
    +    });
    +
    +    if (radarrApiResponse.ok) {
    +      console.log('Radarr API lookup successful.');
    +      const radarrApiResponseData = await radarrApiResponse.json();
    +
    +      if (radarrApiResponseData.length > 0) {
    +        // If the response is an array, you should loop through the results
    +        // and access the Radarr ID for each movie.
    +        for (const movie of radarrApiResponseData) {
    +          const radarrMovieId = movie.movieFile.id;
    +          console.log('Radarr Movie ID:', radarrMovieId);
    +
    +          // Use the Radarr movie ID to delete the corresponding movie file
    +          const deleteMovieFileUrl = `${radarrBaseUrl}/movieFile/${radarrMovieId}`;
    +          console.log(`Calling Radarr API to delete movie file: ${deleteMovieFileUrl}`);
    +
    +          const deleteMovieFileResponse = await fetch(deleteMovieFileUrl, {
    +            method: 'DELETE',
    +            headers: {
    +              'X-Api-Key': radarrApiKey,
    +            },
    +          });
    +
    +          if (deleteMovieFileResponse.ok) {
    +            console.log(`Movie file successfully deleted.`);
    +          } else {
    +            console.error(`Failed to delete movie file: ${deleteMovieFileResponse.statusText}`);
    +          }
    +        }
    +      } else {
    +        console.error('No movies found for TMDB ID:', tmdbId);
    +      }
    +    } else {
    +      console.error('Radarr API lookup failed:', radarrApiResponse.statusText);
    +    }
    +  }
    +};
    +

    #9 Delay 5 seconds

    +

    #10 Search in Radarr

    +

This re-searches for the movie that was just deleted.

    +

All I do here is use the Code function with one input, which is the whole body message of the request; this is assigned to inputs.issue in the code.

    +
    export const code = async (inputs) => {
    +  const issueSubject = inputs.issue.subject;
    +  const movieNameRegex = /(.*)\s\((\d{4})\)/;
    +  const match = movieNameRegex.exec(issueSubject);
    +
    +  if (match) {
    +    const movieName = match[1];
    +    const year = match[2];
    +    const tmdbId = inputs.issue.media.tmdbId;
    +
    +    console.log(`Movie name: ${movieName}`);
    +    console.log(`Year: ${year}`);
    +    console.log(`TMDB ID: ${tmdbId}`);
    +
    +    // Define your Radarr API key
    +    const radarrApiKey = 'your-api-key'; // Replace with your Radarr API key
    +    const radarrBaseUrl = 'https://radarr.example.com/api/v3'
    +
    +    // Use Radarr's API to look up the movie by TMDB ID and get the Radarr ID
    +    const radarrApiUrl = `${radarrBaseUrl}/movie?tmdbId=${tmdbId}`;
    +    console.log('Calling Radarr API to look up the movie...');
    +
    +    const radarrApiResponse = await fetch(radarrApiUrl, {
    +      method: 'GET',
    +      headers: {
    +        'X-Api-Key': radarrApiKey,
    +      },
    +    });
    +
    +    if (radarrApiResponse.ok) {
    +      console.log('Radarr API lookup successful.');
    +      const radarrApiResponseData = await radarrApiResponse.json();
    +
    +      if (radarrApiResponseData.length > 0) {
    +        const movieId = radarrApiResponseData[0].id; // Get the Radarr ID of the first movie
    +        console.log('Radarr Movie ID:', movieId);
    +
    +        // Trigger Radarr to search for the movie and download
    +        const searchUrl = `${radarrBaseUrl}/command`;
    +        console.log(`Calling Radarr API to search for the movie: ${searchUrl}`);
    +
    +        const searchRequestBody = {
    +          name: 'MoviesSearch',
    +          movieIds: [movieId],
    +        };
    +
    +        const searchResponse = await fetch(searchUrl, {
    +          method: 'POST',
    +          headers: {
    +            'X-Api-Key': radarrApiKey,
    +            'Content-Type': 'application/json',
    +          },
    +          body: JSON.stringify(searchRequestBody),
    +        });
    +
    +        if (searchResponse.ok) {
    +          console.log('Radarr movie search initiated.');
    +        } else {
    +          console.error(`Failed to initiate movie search: ${searchResponse.statusText}`);
    +        }
    +      } else {
    +        console.error('No movies found for TMDB ID:', tmdbId);
    +      }
    +    } else {
    +      console.error('Radarr API lookup failed:', radarrApiResponse.statusText);
    +    }
    +  }
    +};
    +

    #11 Delay 4 minutes

    +

This gives your download client time to download and transfer the file to the mapped directory. I have Gig+ internet, and 99% of the time everything is done within 4 minutes.

    +

    #12 Scan JF Libraries

    +

Using the core HTTP feature, send an HTTP POST request to https://jellyfin.domain.com/Library/Refresh with the header +X-MediaBrowser-Token set to your Jellyfin API key.

    +

    I only do this as Jellyfin doesn’t scan my NAS whenever I add a new file.
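If you would rather do this as a Code step instead of the core HTTP piece, here is a minimal sketch of the same request; jellyfin.example.com and the API key value are placeholders.

    export const code = async (inputs) => {
      const jellyfinApiKey = 'your-api-key'; // Replace with your Jellyfin API key
      const jellyfinBaseUrl = 'https://jellyfin.example.com'; // Replace with your Jellyfin URL

      // POST /Library/Refresh tells Jellyfin to rescan all libraries
      const response = await fetch(`${jellyfinBaseUrl}/Library/Refresh`, {
        method: 'POST',
        headers: {
          'X-MediaBrowser-Token': jellyfinApiKey,
        },
      });

      if (response.ok) {
        console.log('Jellyfin library refresh triggered.');
      } else {
        console.error(`Failed to trigger library refresh: ${response.statusText}`);
      }
      return response.ok;
    };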

    +

    #13 Add Comment/Resolve Issue

    +

    This just automatically resolves the issue in Jellyseerr and adds a comment letting the user know action was taken.

    +

All I do here is use the Code function with one input, which is the whole body message of the request; this is assigned to inputs.issue in the code.

    +
    export const code = async (inputs) => {
    +  const issueId = inputs.issue.issue_id;
    +  const apiKey = 'your-api-key'; // Replace with your actual API key
    +  const baseURL = 'https://jellyseerr.example.com/api/v1'
    +
    +  const commentApiUrl = `${baseURL}/issue/${issueId}/comment`;
    +  const statusApiUrl = `${baseURL}/issue/${issueId}/resolved`;
    +
    +  const headers = {
    +    'Content-Type': 'application/json',
    +    'X-Api-Key': apiKey,
    +  };
    +
    +  const commentData = {
    +    message: 'Your issue has been approved and a new version of the content has been automatically downloaded and updated in Jellyfin. Your issue has been set to Resolved. If you are still experiencing problems, re-open your issue.',
    +  };
    +
    +  const commentRequestOptions = {
    +    method: 'POST',
    +    headers: headers,
    +    body: JSON.stringify(commentData),
    +  };
    +
    +  try {
    +    // Post comment
    +    const commentResponse = await fetch(commentApiUrl, commentRequestOptions);
    +    const commentData = await commentResponse.json();
    +    console.log(commentData);
    +
    +    // Update status
    +    const statusRequestOptions = {
    +      method: 'POST', // or PUT depending on your API
    +      headers: headers,
    +      // Add any additional data required to update the status
    +    };
    +
    +    const statusResponse = await fetch(statusApiUrl, statusRequestOptions);
    +    const statusData = await statusResponse.json();
    +    console.log(statusData);
    +
    +    return true;
    +  } catch (error) {
    +    console.error(error);
    +    return false;
    +  }
    +};
    +

We are now done with the Radarr flow. Moving on to Sonarr.

    +

    #14 Branch Episodes and Seasons

    +

With the issue data, we also get an “extra” field, which is where the request's Affected Episode Number and Affected Season Number live. What this branch does is check whether there is an affected episode number by seeing if that field exists in the data. You will have to create an issue for a TV show and say an entire season is affected. Then use that sample data, go back to this branch, and add the value (a code sketch of the equivalent check follows the list below):

    +
      +
    1. Jellyseerr Issue Reported body extra 1 as pictured +Episode Branch Images +
    2. +
    +
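Here is the sketch mentioned above of what that Branch check amounts to in code; the real check is just the AP Branch UI testing whether that extra field exists, and the Affected Episode lookup mirrors the one used in the Sonarr code steps below.

    export const code = async (inputs) => {
      // "Affected Episode" only shows up in extra when a single episode was reported
      const affectedEpisode = (inputs.issue.extra || []).find(item => item.name === 'Affected Episode')?.value;

      if (affectedEpisode !== undefined) {
        console.log(`Affected Episode ${affectedEpisode} found -> continue down the Sonarr path`);
        return true;
      }

      console.log('No Affected Episode field -> whole season reported, resolve with a comment (#15)');
      return false;
    };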

    #15 Add Comment/Resolve Issue

    +

This path means the user reported an issue on an entire season, and it basically sends a response telling them to report each episode individually. I probably could have gotten a script working for this, but I spent a few hours on it and frustratingly gave up. Maybe I will update this in the future, but for now I don't really care.

    +

Again, all I do here is use the Code function with one input, which is the whole body message of the request; this is assigned to inputs.issue in the code.

    +
    export const code = async (inputs) => {
    +  const issueId = inputs.issue.issue_id;
    +  const apiKey = 'your-api-key'; // Replace with your actual API key
    +  const baseURL = 'https://jellyseerr.example.com/api/v1'
    +
    +  const commentApiUrl = `${baseURL}/issue/${issueId}/comment`;
    +  const statusApiUrl = `${baseURL}/issue/${issueId}/resolved`;
    +
    +  const headers = {
    +    'Content-Type': 'application/json',
    +    'X-Api-Key': apiKey,
    +  };
    +
    +  const commentData = {
    +    message: 'Please do not report an entire season as the issue. Specify each Episode number. Please delete this issue and resubmit. Your issue has been automatically marked as Resolved.',
    +  };
    +
    +  const commentRequestOptions = {
    +    method: 'POST',
    +    headers: headers,
    +    body: JSON.stringify(commentData),
    +  };
    +
    +  try {
    +    // Post comment
    +    const commentResponse = await fetch(commentApiUrl, commentRequestOptions);
    +    const commentData = await commentResponse.json();
    +    console.log(commentData);
    +
    +    // Update status
    +    const statusRequestOptions = {
    +      method: 'POST', 
    +      headers: headers,
    +    };
    +
    +    const statusResponse = await fetch(statusApiUrl, statusRequestOptions);
    +    const statusData = await statusResponse.json();
    +    console.log(statusData);
    +
    +    return true;
    +  } catch (error) {
    +    console.error(error);
    +    return false;
    +  }
    +};
    +

    #16 Mark as Failed Sonarr

    +

Again, all I do here is use the Code function with one input, which is the whole body message of the request; this is assigned to inputs.issue in the code.

    +
    export const code = async (inputs) => {
    +  const issueSubject = inputs.issue.subject;
    +  const tvShowNameRegex = /(.*)\s\((\d{4})\)/;
    +  const match = tvShowNameRegex.exec(issueSubject);
    +
    +  if (match) {
    +    const tvShowName = match[1];
    +    const year = match[2];
    +    const tvdbId = inputs.issue.media.tvdbId; // Using TVDB ID for TV shows
    +
    +    console.log(`TV Show name: ${tvShowName}`);
    +    console.log(`Year: ${year}`);
    +    console.log(`TVDB ID: ${tvdbId}`);
    +
    +    // Define your Sonarr API key and base URL
    +    const sonarrApiKey = 'your-api-key'; // Replace with your Sonarr API key
    +    const sonarrBaseUrl = 'https://sonarr.example.com/api/v3';
    +
    +    // Use Sonarr's API to look up the series by TVDB ID and get the Sonarr ID
    +    const seriesResponse = await fetch(`${sonarrBaseUrl}/series/lookup?term=tvdb:${tvdbId}`, {
    +      method: 'GET',
    +      headers: {
    +        'X-Api-Key': sonarrApiKey,
    +      },
    +    });
    +
    +    if (seriesResponse.ok) {
    +      const seriesData = await seriesResponse.json();
    +
    +      if (seriesData.length > 0) {
    +        const seriesId = seriesData[0].id;
    +
    +        // Find the affected season and episode numbers
    +        const affectedSeason = parseInt(inputs.issue.extra.find(item => item.name === 'Affected Season')?.value);
    +        const affectedEpisode = parseInt(inputs.issue.extra.find(item => item.name === 'Affected Episode')?.value);
    +        console.log("Season ID = " + affectedSeason);
    +        console.log("Episode ID = " + affectedEpisode);
    +
    +        // Get the history of the series
    +        const historyResponse = await fetch(`${sonarrBaseUrl}/history/series?seriesId=${seriesId}`, {
    +          method: 'GET',
    +          headers: {
    +            'X-Api-Key': sonarrApiKey,
    +          },
    +        });
    +
    +        if (historyResponse.ok) {
    +          const historyData = await historyResponse.json();
    +
    +          // Find the most recent entry that matches the affected season and episode
    +          const recentEntry = historyData.find(entry => {
    +            const sourceTitleMatch = /S(\d+)E(\d+)/.exec(entry.sourceTitle);
    +            if (sourceTitleMatch) {
    +              const sourceSeason = parseInt(sourceTitleMatch[1]);
    +              const sourceEpisode = parseInt(sourceTitleMatch[2]);
    +              return sourceSeason === affectedSeason && sourceEpisode === affectedEpisode;
    +            }
    +            return false;
    +          });
    +
    +          if (recentEntry) {
    +            const episodeId = recentEntry.episodeId;
    +            const id = recentEntry.id; // This is the ID you need for marking as failed
    +            console.log("Found Episode ID = " + episodeId);
    +            console.log("Found Most Recent Download ID = " + id);
    +
    +            // Use the episode ID to mark the episode as failed
    +            const markFailedUrl = `${sonarrBaseUrl}/history/failed/${id}`;
    +            console.log(`Calling Sonarr API to mark episode as failed: ${markFailedUrl}`);
    +
    +            const markFailedResponse = await fetch(markFailedUrl, {
    +              method: 'POST', 
    +              headers: {
    +                'X-Api-Key': sonarrApiKey,
    +                'Content-Type': 'application/json',
    +              },
    +              body: JSON.stringify({ status: 'failed' }),
    +            });
    +
    +            if (markFailedResponse.ok) {
    +              console.log('Episode successfully marked as failed in Sonarr.');
    +            } else {
    +              console.error(`Failed to mark episode as failed in Sonarr: ${markFailedResponse.statusText}`);
    +            }
    +          } else {
    +            console.error('No matching entry found in the series history for the affected episode.');
    +          }
    +        } else {
    +          console.error('Failed to fetch series history:', historyResponse.statusText);
    +        }
    +      } else {
    +        console.error('No series found for the provided TVDB ID:', tvdbId);
    +      }
    +    } else {
    +      console.error('Failed to fetch series data:', seriesResponse.statusText);
    +    }
    +  }
    +};
    +

    You may have to play around a bit and see whether running this triggers an automatic search for the file. My Sonarr does, but my Radarr doesn't, and I couldn't find a setting for it. Regardless, I include a search command later, and even if Sonarr ends up searching twice, one of the searches appears to cancel out. This is why there is no time delay between this code and the file deletion.

    +
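    If you want to avoid the duplicate search entirely, one option is to ask Sonarr which commands are already queued before sending your own. The sketch below is not part of the original flow and makes some assumptions: that Sonarr v3's GET /command endpoint lists recent commands with name and status fields, and that sonarrBaseUrl and sonarrApiKey are the same placeholders used in the code above.

    +
    +  // Hedged sketch: true if an EpisodeSearch is already queued or running in Sonarr
    +  const hasPendingEpisodeSearch = async (sonarrBaseUrl, sonarrApiKey) => {
    +    const response = await fetch(`${sonarrBaseUrl}/command`, {
    +      method: 'GET',
    +      headers: { 'X-Api-Key': sonarrApiKey },
    +    });
    +    if (!response.ok) return false; // if we can't tell, just let the search happen
    +    const commands = await response.json();
    +    return commands.some(cmd =>
    +      cmd.name === 'EpisodeSearch' && (cmd.status === 'queued' || cmd.status === 'started'));
    +  };
    +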

    #17 Delete File Sonarr

    +

    Again, all I do here is use the code function with one input, which is the whole body of the request; this is assigned to inputs.issue in the code.

    +
    export const code = async (inputs) => {
    +  const issueSubject = inputs.issue.subject;
    +  const tvShowNameRegex = /(.*)\s\((\d{4})\)/;
    +  const match = tvShowNameRegex.exec(issueSubject);
    +
    +  if (match) {
    +    const tvShowName = match[1];
    +    const year = match[2];
    +    const tvdbId = inputs.issue.media.tvdbId;
    +
    +    console.log(`TV Show name: ${tvShowName}`);
    +    console.log(`Year: ${year}`);
    +    console.log(`TVDB ID: ${tvdbId}`);
    +
    +    const sonarrApiKey = 'your-api-key';
    +    const sonarrBaseUrl = 'https://sonarr.example.com/api/v3';
    +
    +    const seriesResponse = await fetch(`${sonarrBaseUrl}/series/lookup?term=tvdb:${tvdbId}`, {
    +      method: 'GET',
    +      headers: {
    +        'X-Api-Key': sonarrApiKey,
    +      },
    +    });
    +
    +    if (seriesResponse.ok) {
    +      const seriesData = await seriesResponse.json();
    +
    +      if (seriesData.length > 0) {
    +        const seriesId = seriesData[0].id;
    +
    +        const affectedSeason = parseInt(inputs.issue.extra.find(item => item.name === 'Affected Season')?.value);
    +        const affectedEpisode = parseInt(inputs.issue.extra.find(item => item.name === 'Affected Episode')?.value);
    +
    +        const episodeFilesResponse = await fetch(`${sonarrBaseUrl}/episodefile?seriesId=${seriesId}`, {
    +          method: 'GET',
    +          headers: {
    +            'X-Api-Key': sonarrApiKey,
    +          },
    +        });
    +
    +        if (episodeFilesResponse.ok) {
    +          const episodeFilesData = await episodeFilesResponse.json();
    +
    +          const targetEpisode = episodeFilesData.find(episode => {
    +            const parsedPath = episode.relativePath.match(/S(\d+)E(\d+)/);
    +            if (parsedPath) {
    +              const episodeSeason = parseInt(parsedPath[1]);
    +              const episodeNumber = parseInt(parsedPath[2]);
    +              return episodeSeason === affectedSeason && episodeNumber === affectedEpisode;
    +            }
    +            return false;
    +          });
    +
    +          if (targetEpisode) {
    +            const targetEpisodeId = targetEpisode.id;
    +            console.log("Found Episode ID = " + targetEpisodeId);
    +
    +            // Delete the target episode file
    +            const deleteEpisodeUrl = `${sonarrBaseUrl}/episodefile/${targetEpisodeId}`;
    +            const deleteEpisodeResponse = await fetch(deleteEpisodeUrl, {
    +              method: 'DELETE',
    +              headers: {
    +                'X-Api-Key': sonarrApiKey,
    +              },
    +            });
    +
    +            if (deleteEpisodeResponse.ok) {
    +              console.log('Episode file successfully deleted in Sonarr.');
    +            } else {
    +              console.error(`Failed to delete episode file in Sonarr: ${deleteEpisodeResponse.statusText}`);
    +            }
    +          } else {
    +            console.error('No matching episode found in the episode files for the affected season and episode.');
    +          }
    +        } else {
    +          console.error('Failed to fetch episode files:', episodeFilesResponse.statusText);
    +        }
    +      } else {
    +        console.error('No series found for the provided TVDB ID:', tvdbId);
    +      }
    +    } else {
    +      console.error('Failed to fetch series data:', seriesResponse.statusText);
    +    }
    +  }
    +};
    +

    #18 Re-search in Sonarr

    +

    Again, all I do here is use the code function with one input, which is the whole body of the request; this is assigned to inputs.issue in the code.

    +
    export const code = async (inputs) => {
    +  const issueSubject = inputs.issue.subject;
    +  const tvShowNameRegex = /(.*)\s\((\d{4})\)/;
    +  const match = tvShowNameRegex.exec(issueSubject);
    +
    +  if (match) {
    +    const tvShowName = match[1];
    +    const year = match[2];
    +    const tvdbId = inputs.issue.media.tvdbId;
    +
    +    console.log(`TV Show name: ${tvShowName}`);
    +    console.log(`Year: ${year}`);
    +    console.log(`TVDB ID: ${tvdbId}`);
    +
    +    const sonarrApiKey = 'your-api-key';
    +    const sonarrBaseUrl = 'https://sonarr.example.com/api/v3';
    +
    +    const seriesResponse = await fetch(`${sonarrBaseUrl}/series/lookup?term=tvdb:${tvdbId}`, {
    +      method: 'GET',
    +      headers: {
    +        'X-Api-Key': sonarrApiKey,
    +      },
    +    });
    +
    +    if (seriesResponse.ok) {
    +      const seriesData = await seriesResponse.json();
    +
    +      if (seriesData.length > 0) {
    +        const seriesId = seriesData[0].id;
    +
    +        const affectedSeason = parseInt(inputs.issue.extra.find(item => item.name === 'Affected Season')?.value);
    +        const affectedEpisode = parseInt(inputs.issue.extra.find(item => item.name === 'Affected Episode')?.value);
    +
    +        const historyResponse = await fetch(`${sonarrBaseUrl}/history/series?seriesId=${seriesId}`, {
    +          method: 'GET',
    +          headers: {
    +            'X-Api-Key': sonarrApiKey,
    +          },
    +        });
    +
    +        if (historyResponse.ok) {
    +          const historyData = await historyResponse.json();
    +
    +          const recentEntry = historyData.find(entry => {
    +            const sourceTitleMatch = /S(\d+)E(\d+)/.exec(entry.sourceTitle);
    +            if (sourceTitleMatch) {
    +              const sourceSeason = parseInt(sourceTitleMatch[1]);
    +              const sourceEpisode = parseInt(sourceTitleMatch[2]);
    +              return sourceSeason === affectedSeason && sourceEpisode === affectedEpisode;
    +            }
    +            return false;
    +          });
    +
    +          if (recentEntry) {
    +            const episodeId = recentEntry.episodeId;
    +            console.log("Found Episode ID = " + episodeId);
    +
    +            // Perform the episode search
    +            const searchPayload = {
    +              name: 'EpisodeSearch',
    +              episodeIds: [episodeId],
    +            };
    +
    +            const searchResponse = await fetch(`${sonarrBaseUrl}/command`, {
    +              method: 'POST',
    +              headers: {
    +                'X-Api-Key': sonarrApiKey,
    +                'Content-Type': 'application/json',
    +              },
    +              body: JSON.stringify(searchPayload),
    +            });
    +
    +            if (searchResponse.ok) {
    +              console.log('Episode search command successfully sent to Sonarr.');
    +            } else {
    +              console.error(`Failed to send episode search command to Sonarr: ${searchResponse.statusText}`);
    +            }
    +          } else {
    +            console.error('No matching entry found in the series history for the affected episode.');
    +          }
    +        } else {
    +          console.error('Failed to fetch series history:', historyResponse.statusText);
    +        }
    +      } else {
    +        console.error('No series found for the provided TVDB ID:', tvdbId);
    +      }
    +    } else {
    +      console.error('Failed to fetch series data:', seriesResponse.statusText);
    +    }
    +  }
    +};
    +

    #19 Delay for 4 Minutes

    +

    Waiting for media to download and transfer.

    +
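    The fixed delay is a guess either way: slow downloads need more than 4 minutes and fast ones finish sooner. If you would rather wait on Sonarr itself, a rough alternative is to poll the download queue until the episode leaves it. This is only a sketch under a few assumptions: that Sonarr v3's GET /queue returns a paginated object with a records array whose items carry episodeId, and that your automation platform lets a code step run this long (ActivePieces code steps have execution limits, so test before relying on it). waitForEpisodeOffQueue and its parameters are placeholder names.

    +
    +  // Hedged sketch: wait until the given episode is no longer in Sonarr's queue
    +  const waitForEpisodeOffQueue = async (sonarrBaseUrl, sonarrApiKey, episodeId) => {
    +    for (let attempt = 0; attempt < 8; attempt++) { // 8 x 30s is roughly the same 4 minutes
    +      const response = await fetch(`${sonarrBaseUrl}/queue?pageSize=100`, {
    +        method: 'GET',
    +        headers: { 'X-Api-Key': sonarrApiKey },
    +      });
    +      if (response.ok) {
    +        const queue = await response.json();
    +        const records = queue.records ?? [];
    +        if (!records.some(item => item.episodeId === episodeId)) {
    +          return true; // episode has finished downloading/importing (or was never queued)
    +        }
    +      }
    +      await new Promise(resolve => setTimeout(resolve, 30000)); // check again in 30 seconds
    +    }
    +    return false; // timed out - treat it like the fixed delay expiring
    +  };
    +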

    #20 Add Comment/Resolve Issue

    +

    Again, all I do here is use the code function with one input, which is the whole body of the request; this is assigned to inputs.issue in the code.

    +
    export const code = async (inputs) => {
    +  const issueId = inputs.issue.issue_id;
    +  const apiKey = 'your-api-key'; // Replace with your actual API key
    +  const baseURL = 'https://jellyseerr.example.com/api/v1'
    +
    +  const commentApiUrl = `${baseURL}/issue/${issueId}/comment`;
    +  const statusApiUrl = `${baseURL}/issue/${issueId}/resolved`;
    +
    +  const headers = {
    +    'Content-Type': 'application/json',
    +    'X-Api-Key': apiKey,
    +  };
    +
    +  const commentData = {
    +    message: 'Your issue has been approved and a new version of the content has been automatically downloaded and updated in Jellyfin. Your issue has been set to Resolved. If you are still experiencing problems, re-open your issue.',
    +  };
    +
    +  const commentRequestOptions = {
    +    method: 'POST',
    +    headers: headers,
    +    body: JSON.stringify(commentData),
    +  };
    +
    +  try {
    +    // Post comment
    +    const commentResponse = await fetch(commentApiUrl, commentRequestOptions);
    +    const commentResult = await commentResponse.json();
    +    console.log(commentResult);
    +
    +    // Update status
    +    const statusRequestOptions = {
    +      method: 'POST',
    +      headers: headers,
    +    };
    +
    +    const statusResponse = await fetch(statusApiUrl, statusRequestOptions);
    +    const statusData = await statusResponse.json();
    +    console.log(statusData);
    +
    +    return true;
    +  } catch (error) {
    +    console.error(error);
    +    return false;
    +  }
    +};
    +

    #21 Same as #12

    +

    Conclusion

    +

    Once all this is done you can publish the flow and try it out!

    +
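    An easy way to test without waiting for a real report is to POST a fake issue body straight at the flow's webhook trigger. This is only a sketch: the URL is a placeholder you need to replace with the webhook URL shown on your flow's trigger, and the body is just the illustrative exampleIssueBody from step #15, which mimics only the fields this post's code reads, not the full payload Jellyseerr sends.

    +
    +  // Hedged test sketch: fire a fake issue at the flow's webhook trigger
    +  const testWebhookUrl = 'https://your-activepieces-host/your-flow-webhook-url'; // placeholder
    +  await fetch(testWebhookUrl, {
    +    method: 'POST',
    +    headers: { 'Content-Type': 'application/json' },
    +    body: JSON.stringify(exampleIssueBody), // the sample body shown back in step #15
    +  });
    +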

    If you have any feedback you can DM me on Reddit. I'd love to see how you have adapted this automation to your exact needs.

    +

    Now the hard part, getting your users to actually report the issues in Jellyseerr and not reach out to you!

    + + +
    + + +
    +
    + + + + + + + + + + + + + + + diff --git a/posts/index.html b/posts/index.html new file mode 100644 index 0000000..dfd035f --- /dev/null +++ b/posts/index.html @@ -0,0 +1,354 @@ + + + + + + + +Posts | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + + +
    +
    +

    Resize Ubuntu VM Disk in Proxmox +

    +
    +
    +

    1st step: Increase/resize disk from GUI console 2nd step: Extend physical drive partition and check free space with: sudo growpart /dev/sda 3 sudo pvdisplay sudo pvresize /dev/sda3 sudo pvdisplay 3rd step: Extend Logical volume sudo lvdisplay sudo lvextend -l +100%FREE /dev/ubuntu-vg/ubuntu-lv sudo lvdisplay 4th step: Resize Filesystem sudo resize2fs /dev/ubuntu-vg/ubuntu-lv sudo fdisk -l

    +
    +
    February 6, 2024 · 1 min · 53 words · Mafyuh
    + +
    + +
    +
    +

    Proton Mail - SimpleLogin authentik Social Login Setup +

    +
    +
    +

    This is just a quick guide on how to authenticate your authentik users with Proton using SimpleLogin OIDC. +To accomplish this, first create a SimpleLogin acct by logging in with Proton. Once thats done go to https://app.simplelogin.io/developer and create a website. Give it your authentik URL. +Then go to Oauth Settings and copy your client ID and secret for next step. add your authentik URL in redirect URL like this https://auth....

    +
    +
    November 12, 2023 · 2 min · 227 words · Mafyuh
    + +
    + +
    +
    +

    How To Automate Jellyfin Issue Handling +

    +
    +
    +

    I wanted a way to automate when users tell me a video on my Jellyfin server has an issue. After alot of trial and error, ChatGPT, Bard and I came up with this automation. +Requirements My only requirements when making this was that it was free and self-hostable. Not even any NPM extensions are required in AP. Actual Software requirements are: +Sonarr Radarr Overseerr/Jellyseerr Optional +SMTP server or ability to send SMTP messages (can also use discord) ActivePieces or any other automation platform that supports TS....

    +
    +
    November 11, 2023 · 19 min · 3952 words · Mafyuh
    + +
    + +
    +
    +

    How to authenticate Guacamole via authentik with Cloudflare and Nginx Proxy Manager +

    +
    +
    +

    authentik’s docs have a guide already for Guacamole. You can find that here. Follow all the instructions there, (especially the part where you create a user in Guacamole with the USERNAME of your email. not just filling in the email), but if you are using Cloudflare as our DNS you may run into problems. Such as infinite redirect loop. +Error 403 Forbidden While it was looping, I checked my Guacamole docker container logs in Portainer, and found the 403 Forbidden error....

    +
    +
    October 29, 2023 · 2 min · 344 words · Mafyuh
    + +
    + +
    +
    +

    How to authenticate Zammad via SAML with Nginx Proxy Manager +

    +
    +
    +

    If you are getting error messages like: +422: the change you wanted was rejected. message from saml: actioncontroller::invalidauthenticitytoken Just make sure you set these in your Nginx Proxy Manager hosts Advanced field: +location / { proxy_pass http://zammad:8080; # Replace proxy_set_header Host $host; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Ssl on; proxy_set_header X-Forwarded-Port $server_port; proxy_set_header X-Forwarded-Host $host; } I spent way too long trying to figure this out, reading through Github issues, breaking my SAML provider and Zammad configs, starting over, when the whole time it was just good old nginx header issues....

    +
    +
    October 29, 2023 · 1 min · 107 words · Mafyuh
    + +
    + +
    +
    +

    How To Authenticate KASM via authentik +

    +
    +
    +

    You could do this with OpenID as well but this method is using SAML. This guide assumes you already have running instances of Kasm Workspaces and authentik. +The official authentik docs dont have a Kasm Integration listed at the time. So I thought I would help out anyone who is trying to integrate these services via SAML. authentik’s SAML docs can be found here. +Setting up Kasm In the Kasm Workspaces admin, click Access Management - Authentication - SAML and create a new configuration....

    +
    +
    September 30, 2023 · 2 min · 393 words · Mafyuh
    + +
    + +
    +
    +

    Software +

    +
    +
    +

    Just a straight forward list of pretty much everything that makes up my homelab. Or systems I’ve ran in the past. +Operating Systems +Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Ubuntu doesnt play nice) Debian 11 Proxmox 8 Windows 10/11 TrueNAS Scale (virtualized) CasaOS (zimaboard) pfSense Applications/Containers +Nginx Proxy Manager Nginx Apache2 Traefik Authentik Portainer Yacht AdGuardHome Pihole Wazuh Zabbix Uptime Kuma Ghost (this blog) Wordpress Hydroxide (proton mail bridge) Calibre Smokeping Openspeedtest Grafana Prometheus InfluxDB PostgresSQL MySQL Watchtower Apache Guacamole Ansible Terraform Packer Vaultwarden Kasm Workspaces Jellyfin Plex Twingate Tailscale Headscale Wireguard LinkStack N8N Gotify Nextcloud Immich AI...

    +
    +
    August 26, 2023 · 1 min · 124 words · Mafyuh
    + +
    + +
    +
    +

    Archives +

    +
    +
    +

    archives

    +
    +
    0 min · 0 words · Mafyuh
    + +
    + +
    +
    +

    Search +

    +
    +
    +

    search

    +
    +
    0 min · 0 words · Mafyuh
    + +
    +
    + + + + + + + + + + + + + + diff --git a/posts/index.xml b/posts/index.xml new file mode 100644 index 0000000..28eae1d --- /dev/null +++ b/posts/index.xml @@ -0,0 +1,74 @@ + + + + Posts on Mafyuh's Blog + https://mafyuh.github.io/posts/ + Recent content in Posts on Mafyuh's Blog + Hugo -- gohugo.io + en + Tue, 06 Feb 2024 02:58:07 +0000 + + + Resize Ubuntu VM Disk in Proxmox + https://mafyuh.github.io/posts/resize-ubuntu-vm-disk/ + Tue, 06 Feb 2024 02:58:07 +0000 + https://mafyuh.github.io/posts/resize-ubuntu-vm-disk/ + 1st step: Increase/resize disk from GUI console 2nd step: Extend physical drive partition and check free space with: sudo growpart /dev/sda 3 sudo pvdisplay sudo pvresize /dev/sda3 sudo pvdisplay 3rd step: Extend Logical volume sudo lvdisplay sudo lvextend -l +100%FREE /dev/ubuntu-vg/ubuntu-lv sudo lvdisplay 4th step: Resize Filesystem sudo resize2fs /dev/ubuntu-vg/ubuntu-lv sudo fdisk -l + + + Proton Mail - SimpleLogin authentik Social Login Setup + https://mafyuh.github.io/posts/proton-mail-authentik-social-login-setup/ + Sun, 12 Nov 2023 16:20:00 +0000 + https://mafyuh.github.io/posts/proton-mail-authentik-social-login-setup/ + This is just a quick guide on how to authenticate your authentik users with Proton using SimpleLogin OIDC. +To accomplish this, first create a SimpleLogin acct by logging in with Proton. Once thats done go to https://app.simplelogin.io/developer and create a website. Give it your authentik URL. +Then go to Oauth Settings and copy your client ID and secret for next step. add your authentik URL in redirect URL like this https://auth. + + + How To Automate Jellyfin Issue Handling + https://mafyuh.github.io/posts/how-to-automate-jellyfin-issue-handling/ + Sat, 11 Nov 2023 16:20:00 +0000 + https://mafyuh.github.io/posts/how-to-automate-jellyfin-issue-handling/ + I wanted a way to automate when users tell me a video on my Jellyfin server has an issue. After alot of trial and error, ChatGPT, Bard and I came up with this automation. +Requirements My only requirements when making this was that it was free and self-hostable. Not even any NPM extensions are required in AP. Actual Software requirements are: +Sonarr Radarr Overseerr/Jellyseerr Optional +SMTP server or ability to send SMTP messages (can also use discord) ActivePieces or any other automation platform that supports TS. + + + How to authenticate Guacamole via authentik with Cloudflare and Nginx Proxy Manager + https://mafyuh.github.io/posts/how-to-authenticate-guacamole-authentik-nginxproxymanager/ + Sun, 29 Oct 2023 16:20:00 +0000 + https://mafyuh.github.io/posts/how-to-authenticate-guacamole-authentik-nginxproxymanager/ + authentik&rsquo;s docs have a guide already for Guacamole. You can find that here. Follow all the instructions there, (especially the part where you create a user in Guacamole with the USERNAME of your email. not just filling in the email), but if you are using Cloudflare as our DNS you may run into problems. Such as infinite redirect loop. +Error 403 Forbidden While it was looping, I checked my Guacamole docker container logs in Portainer, and found the 403 Forbidden error. + + + How to authenticate Zammad via SAML with Nginx Proxy Manager + https://mafyuh.github.io/posts/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/ + Sun, 29 Oct 2023 16:20:00 +0000 + https://mafyuh.github.io/posts/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/ + If you are getting error messages like: +422: the change you wanted was rejected. 
message from saml: actioncontroller::invalidauthenticitytoken Just make sure you set these in your Nginx Proxy Manager hosts Advanced field: +location / { proxy_pass http://zammad:8080; # Replace proxy_set_header Host $host; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Ssl on; proxy_set_header X-Forwarded-Port $server_port; proxy_set_header X-Forwarded-Host $host; } I spent way too long trying to figure this out, reading through Github issues, breaking my SAML provider and Zammad configs, starting over, when the whole time it was just good old nginx header issues. + + + How To Authenticate KASM via authentik + https://mafyuh.github.io/posts/how-to-authenticate-kasm-via-authentik/ + Sat, 30 Sep 2023 16:20:00 +0000 + https://mafyuh.github.io/posts/how-to-authenticate-kasm-via-authentik/ + You could do this with OpenID as well but this method is using SAML. This guide assumes you already have running instances of Kasm Workspaces and authentik. +The official authentik docs dont have a Kasm Integration listed at the time. So I thought I would help out anyone who is trying to integrate these services via SAML. authentik&rsquo;s SAML docs can be found here. +Setting up Kasm In the Kasm Workspaces admin, click Access Management - Authentication - SAML and create a new configuration. + + + Software + https://mafyuh.github.io/posts/software/ + Sat, 26 Aug 2023 00:13:40 +0000 + https://mafyuh.github.io/posts/software/ + Just a straight forward list of pretty much everything that makes up my homelab. Or systems I&rsquo;ve ran in the past. +Operating Systems +Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Ubuntu doesnt play nice) Debian 11 Proxmox 8 Windows 10/11 TrueNAS Scale (virtualized) CasaOS (zimaboard) pfSense Applications/Containers +Nginx Proxy Manager Nginx Apache2 Traefik Authentik Portainer Yacht AdGuardHome Pihole Wazuh Zabbix Uptime Kuma Ghost (this blog) Wordpress Hydroxide (proton mail bridge) Calibre Smokeping Openspeedtest Grafana Prometheus InfluxDB PostgresSQL MySQL Watchtower Apache Guacamole Ansible Terraform Packer Vaultwarden Kasm Workspaces Jellyfin Plex Twingate Tailscale Headscale Wireguard LinkStack N8N Gotify Nextcloud Immich AI + + + diff --git a/posts/page/1/index.html b/posts/page/1/index.html new file mode 100644 index 0000000..34c4fcd --- /dev/null +++ b/posts/page/1/index.html @@ -0,0 +1,10 @@ + + + + https://mafyuh.github.io/posts/ + + + + + + diff --git a/posts/proton-mail-authentik-social-login-setup/index.html b/posts/proton-mail-authentik-social-login-setup/index.html new file mode 100644 index 0000000..933fd8c --- /dev/null +++ b/posts/proton-mail-authentik-social-login-setup/index.html @@ -0,0 +1,369 @@ + + + + + + + +Proton Mail - SimpleLogin authentik Social Login Setup | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + +
    +
    + +

    + Proton Mail - SimpleLogin authentik Social Login Setup +

    + +
    + +

    This is just a quick guide on how to authenticate your authentik users with Proton using SimpleLogin OIDC.

    +

    Proton Authentik Login Screen +

    +

    To accomplish this, first create a SimpleLogin account by logging in with Proton. Once that's done, go to https://app.simplelogin.io/developer and create a website. Give it your authentik URL.

    +

    Then go to OAuth Settings and copy your client ID and secret for the next step. Add your authentik URL as the redirect URL, like this: https://auth.example.com/source/oauth/callback/simplelogin/ (simplelogin being the slug in authentik)

    +

    In authentik, go to Directory - Federation and Social login - Create and create an OpenID OAuth source

    +

    Name: SimpleLogin +Slug: simplelogin +User matching mode: I chose link with identical email +Consumer key: Paste your key +Consumer secret: Paste your secret +Authorization URL: https://app.simplelogin.io/oauth2/authorize +Access token URL: https://app.simplelogin.io/oauth2/token +Profile URL: https://app.simplelogin.io/oauth2/userinfo +OIDC Well-known URL: https://app.simplelogin.io/.well-known/openid-configuration

    +

    For the logo, it appears authentik inverts your image. I don't know if it's dark mode or a bug, but regardless, here are the regular and inverted images I used. Just right-click and save the image:

    +

    Proton Logo + +Proton Logo Inverted +

    +

    Now go to Flows and Stages - Flows - choose your default authentication flow - click it, then click Stage Bindings - click Edit Stage to the right of your identification stage - expand Source settings and make sure you CTRL + click your newly created SimpleLogin source.

    +

    You should be able to log out and try to log in with your Proton account!

    + + +
    + + +
    +
    + + + + + + + + + + + + + + + diff --git a/posts/resize-ubuntu-vm-disk/index.html b/posts/resize-ubuntu-vm-disk/index.html new file mode 100644 index 0000000..c6bae5e --- /dev/null +++ b/posts/resize-ubuntu-vm-disk/index.html @@ -0,0 +1,360 @@ + + + + + + + +Resize Ubuntu VM Disk in Proxmox | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + +
    +
    + +

    + Resize Ubuntu VM Disk in Proxmox +

    + +
    +
    + + Table of Contents + + +
    +
    +
    +
    + +

    1st step: Increase/resize disk from GUI console

    +

    Proxmox webui change +

    +

    2nd step: Extend physical drive partition and check free space with:

    +
    sudo growpart /dev/sda 3
    +
    sudo pvdisplay
    +
    sudo pvresize /dev/sda3
    +
    sudo pvdisplay
    +

    3rd step: Extend Logical volume

    +
    sudo lvdisplay
    +
    sudo lvextend -l +100%FREE /dev/ubuntu-vg/ubuntu-lv
    +
    sudo lvdisplay
    +

    4th step: Resize Filesystem

    +
    sudo resize2fs /dev/ubuntu-vg/ubuntu-lv
    +
    sudo fdisk -l
    +
    + +
    + + +
    +
    + + + + + + + + + + + + + + + diff --git a/posts/search/index.html b/posts/search/index.html new file mode 100644 index 0000000..0d7b92c --- /dev/null +++ b/posts/search/index.html @@ -0,0 +1,277 @@ + + + + + + + +Search | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + + + + +
    + + + + + + + + + + + + + + diff --git a/posts/software/index.html b/posts/software/index.html new file mode 100644 index 0000000..fa32f12 --- /dev/null +++ b/posts/software/index.html @@ -0,0 +1,429 @@ + + + + + + + +Software | Mafyuh's Blog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + +
    +
    + +
    +
    + +

    + Software +

    + +
    + +

    Just a straightforward list of pretty much everything that makes up my homelab, or systems I've run in the past.

    +

    Operating Systems

    +
      +
    • Ubuntu 23.04
    • +
    • Ubuntu 22.04 (primary on most systems)
    • +
    • CentOS/Fedora 38 (only when Ubuntu doesnt play nice)
    • +
    • Debian 11
    • +
    • Proxmox 8
    • +
    • Windows 10/11
    • +
    • TrueNAS Scale (virtualized)
    • +
    • CasaOS (zimaboard)
    • +
    • pfSense
    • +
    +

    Applications/Containers

    + +

    AI

    +
      +
    • GPT4ALL
    • +
    • Stable Diffusion
    • +
    • LocalAI
    • +
    • Auto-GPT
    • +
    • Comfy UI
    • +
    +

    Arr Suite

    +
      +
    • Radarr
    • +
    • Sonarr
    • +
    • Prowlarr
    • +
    • Lidarr
    • +
    • Jellyseer
    • +
    • Tdarr
    • +
    • Requesterr
    • +
    • Real Debrid Client
    • +
    • Wizarr
    • +
    + + +
    + + +
    +
    + + + + + + + + + + + + + + + diff --git a/sitemap.xml b/sitemap.xml index f3ba5a4..502261e 100644 --- a/sitemap.xml +++ b/sitemap.xml @@ -5,25 +5,25 @@ https://mafyuh.github.io/ 2024-02-06T02:58:07+00:00 - https://mafyuh.github.io/post/ + https://mafyuh.github.io/posts/ 2024-02-06T02:58:07+00:00 - https://mafyuh.github.io/post/resize-ubuntu-vm-disk/ + https://mafyuh.github.io/posts/resize-ubuntu-vm-disk/ 2024-02-06T02:58:07+00:00 - https://mafyuh.github.io/post/proton-mail-authentik-social-login-setup/ + https://mafyuh.github.io/posts/proton-mail-authentik-social-login-setup/ 2023-11-12T16:20:00+00:00 - https://mafyuh.github.io/post/how-to-automate-jellyfin-issue-handling/ + https://mafyuh.github.io/posts/how-to-automate-jellyfin-issue-handling/ 2023-11-11T16:20:00+00:00 - https://mafyuh.github.io/post/how-to-authenticate-guacamole-authentik-nginxproxymanager/ + https://mafyuh.github.io/posts/how-to-authenticate-guacamole-authentik-nginxproxymanager/ 2023-10-29T16:20:00+00:00 - https://mafyuh.github.io/post/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/ + https://mafyuh.github.io/posts/how-to-authenticate-zammad-via-saml-with-nginx-proxy-manager/ 2023-10-29T16:20:00+00:00 - https://mafyuh.github.io/post/how-to-authenticate-kasm-via-authentik/ + https://mafyuh.github.io/posts/how-to-authenticate-kasm-via-authentik/ 2023-09-30T16:20:00+00:00 https://mafyuh.github.io/categories/ @@ -41,10 +41,14 @@ https://mafyuh.github.io/categories/import-2023-09-08-0216/ 2023-08-26T00:13:40+00:00 - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ 2023-08-26T00:13:40+00:00 https://mafyuh.github.io/tags/ 2023-08-26T00:13:40+00:00 + + https://mafyuh.github.io/archives/ + + https://mafyuh.github.io/posts/search/ diff --git a/tags/import-2023-08-26-0406/index.html b/tags/import-2023-08-26-0406/index.html index 0327392..226ce54 100644 --- a/tags/import-2023-08-26-0406/index.html +++ b/tags/import-2023-08-26-0406/index.html @@ -12,8 +12,8 @@ - - + + @@ -117,8 +117,13 @@
  • - - Tags + + Archives + +
  • +
  • + + Search
  • @@ -151,7 +156,7 @@ Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Nginx Proxy Manager Nginx Apache2 Traefik Authentik Portainer Yacht AdGuardHome Pihole Wazuh Zabbix Uptime Kuma Ghost (this blog) Wordpress Hydroxide (proton mail bridge) Calibre Smokeping Openspeedtest Grafana Prometheus InfluxDB PostgresSQL MySQL Watchtower Apache Guacamole Ansible Terraform Packer Vaultwarden Kasm Workspaces Jellyfin Plex Twingate Tailscale Headscale Wireguard LinkStack N8N Gotify Nextcloud Immich AI...

    August 26, 2023 · 1 min · 124 words · Mafyuh
    - + diff --git a/tags/import-2023-08-26-0406/index.xml b/tags/import-2023-08-26-0406/index.xml index 149c079..4beb9e4 100644 --- a/tags/import-2023-08-26-0406/index.xml +++ b/tags/import-2023-08-26-0406/index.xml @@ -10,9 +10,9 @@ Software - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Sat, 26 Aug 2023 00:13:40 +0000 - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Just a straight forward list of pretty much everything that makes up my homelab. Or systems I&rsquo;ve ran in the past. Operating Systems Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Ubuntu doesnt play nice) Debian 11 Proxmox 8 Windows 10/11 TrueNAS Scale (virtualized) CasaOS (zimaboard) pfSense Applications/Containers diff --git a/tags/import-2023-09-08-0216/index.html b/tags/import-2023-09-08-0216/index.html index 2405297..d1e80ba 100644 --- a/tags/import-2023-09-08-0216/index.html +++ b/tags/import-2023-09-08-0216/index.html @@ -12,8 +12,8 @@ - - + + @@ -117,8 +117,13 @@
  • - - Tags + + Archives + +
  • +
  • + + Search
  • @@ -151,7 +156,7 @@ Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Nginx Proxy Manager Nginx Apache2 Traefik Authentik Portainer Yacht AdGuardHome Pihole Wazuh Zabbix Uptime Kuma Ghost (this blog) Wordpress Hydroxide (proton mail bridge) Calibre Smokeping Openspeedtest Grafana Prometheus InfluxDB PostgresSQL MySQL Watchtower Apache Guacamole Ansible Terraform Packer Vaultwarden Kasm Workspaces Jellyfin Plex Twingate Tailscale Headscale Wireguard LinkStack N8N Gotify Nextcloud Immich AI...

    August 26, 2023 · 1 min · 124 words · Mafyuh
    - + diff --git a/tags/import-2023-09-08-0216/index.xml b/tags/import-2023-09-08-0216/index.xml index b16bcc6..f69ea30 100644 --- a/tags/import-2023-09-08-0216/index.xml +++ b/tags/import-2023-09-08-0216/index.xml @@ -10,9 +10,9 @@ Software - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Sat, 26 Aug 2023 00:13:40 +0000 - https://mafyuh.github.io/post/software/ + https://mafyuh.github.io/posts/software/ Just a straight forward list of pretty much everything that makes up my homelab. Or systems I&rsquo;ve ran in the past. Operating Systems Ubuntu 23.04 Ubuntu 22.04 (primary on most systems) CentOS/Fedora 38 (only when Ubuntu doesnt play nice) Debian 11 Proxmox 8 Windows 10/11 TrueNAS Scale (virtualized) CasaOS (zimaboard) pfSense Applications/Containers diff --git a/tags/index.html b/tags/index.html index 09eff63..43c620a 100644 --- a/tags/index.html +++ b/tags/index.html @@ -12,8 +12,8 @@ - - + + @@ -117,8 +117,13 @@
  • - - Tags + + Archives + +
  • +
  • + + Search